diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml new file mode 100644 index 00000000..e69de29b diff --git a/.gitignore b/.gitignore index b87e1ed5..b9daa52f 100644 --- a/.gitignore +++ b/.gitignore @@ -46,6 +46,7 @@ pip-log.txt # Built documentation docs/_build bigquery/docs/generated +docs.metadata # Virtual environment env/ @@ -57,4 +58,4 @@ system_tests/local_test_setup # Make sure a generated file isn't accidentally committed. pylintrc -pylintrc.test \ No newline at end of file +pylintrc.test diff --git a/.kokoro/build.sh b/.kokoro/build.sh index d76d29f6..185e4aaa 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation python3.6 -m pip install --upgrade --quiet nox python3.6 -m nox --version -python3.6 -m nox +# If NOX_SESSION is set, it only runs the specified session, +# otherwise run all the sessions. +if [[ -n "${NOX_SESSION:-}" ]]; then + python3.6 -m nox -s "${NOX_SESSION:-}" +else + python3.6 -m nox +fi diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile new file mode 100644 index 00000000..412b0b56 --- /dev/null +++ b/.kokoro/docker/docs/Dockerfile @@ -0,0 +1,98 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ubuntu:20.04 + +ENV DEBIAN_FRONTEND noninteractive + +# Ensure local Python is preferred over distribution Python. +ENV PATH /usr/local/bin:$PATH + +# Install dependencies. +RUN apt-get update \ + && apt-get install -y --no-install-recommends \ + apt-transport-https \ + build-essential \ + ca-certificates \ + curl \ + dirmngr \ + git \ + gpg-agent \ + graphviz \ + libbz2-dev \ + libdb5.3-dev \ + libexpat1-dev \ + libffi-dev \ + liblzma-dev \ + libreadline-dev \ + libsnappy-dev \ + libssl-dev \ + libsqlite3-dev \ + portaudio19-dev \ + redis-server \ + software-properties-common \ + ssh \ + sudo \ + tcl \ + tcl-dev \ + tk \ + tk-dev \ + uuid-dev \ + wget \ + zlib1g-dev \ + && add-apt-repository universe \ + && apt-get update \ + && apt-get -y install jq \ + && apt-get clean autoclean \ + && apt-get autoremove -y \ + && rm -rf /var/lib/apt/lists/* \ + && rm -f /var/cache/apt/archives/*.deb + + +COPY fetch_gpg_keys.sh /tmp +# Install the desired versions of Python. 
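Aside on the `.kokoro/build.sh` hunk above: the new conditional lets a job config narrow a run to a single nox session. A minimal local sketch of the same branch, assuming a `docs` session exists (list the real ones with `nox -l`):

```bash
# Run one session when NOX_SESSION is set, everything otherwise,
# mirroring the updated build.sh ("docs" is only an example name).
export NOX_SESSION=docs
if [[ -n "${NOX_SESSION:-}" ]]; then
  python3 -m nox -s "${NOX_SESSION}"
else
  python3 -m nox
fi
```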
+RUN set -ex \ + && export GNUPGHOME="$(mktemp -d)" \ + && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \ + && /tmp/fetch_gpg_keys.sh \ + && for PYTHON_VERSION in 3.7.8 3.8.5; do \ + wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \ + && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \ + && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \ + && rm -r python-${PYTHON_VERSION}.tar.xz.asc \ + && mkdir -p /usr/src/python-${PYTHON_VERSION} \ + && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \ + && rm python-${PYTHON_VERSION}.tar.xz \ + && cd /usr/src/python-${PYTHON_VERSION} \ + && ./configure \ + --enable-shared \ + # This works only on Python 2.7 and throws a warning on every other + # version, but seems otherwise harmless. + --enable-unicode=ucs4 \ + --with-system-ffi \ + --without-ensurepip \ + && make -j$(nproc) \ + && make install \ + && ldconfig \ + ; done \ + && rm -rf "${GNUPGHOME}" \ + && rm -rf /usr/src/python* \ + && rm -rf ~/.cache/ + +RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ + && python3.7 /tmp/get-pip.py \ + && python3.8 /tmp/get-pip.py \ + && rm /tmp/get-pip.py + +CMD ["python3.7"] diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh new file mode 100755 index 00000000..d653dd86 --- /dev/null +++ b/.kokoro/docker/docs/fetch_gpg_keys.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A script to fetch gpg keys with retry. +# Avoid jinja parsing the file. +# + +function retry { + if [[ "${#}" -le 1 ]]; then + echo "Usage: ${0} retry_count commands.." + exit 1 + fi + local retries=${1} + local command="${@:2}" + until [[ "${retries}" -le 0 ]]; do + $command && return 0 + if [[ $? -ne 0 ]]; then + echo "command failed, retrying" + ((retries--)) + fi + done + return 1 +} + +# 3.6.9, 3.7.5 (Ned Deily) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D + +# 3.8.0 (Łukasz Langa) +retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \ + E3FF2839C048B25C084DEBE9B26995E310250568 + +# diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index 0e6863e9..5b751977 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -11,12 +11,12 @@ action { gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" # Use the trampoline script to run in docker. -build_file: "python-cloudbuild/.kokoro/trampoline.sh" +build_file: "python-cloudbuild/.kokoro/trampoline_v2.sh" # Configure the docker image for kokoro-trampoline. 
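Aside: `fetch_gpg_keys.sh` above and the Dockerfile's `RUN` loop combine into a download-verify-unpack flow. A condensed sketch for a single version, assuming the release-manager keys were already imported by `fetch_gpg_keys.sh`:

```bash
V=3.8.5  # one of the versions the Dockerfile installs
wget -O Python-${V}.tar.xz "https://www.python.org/ftp/python/${V}/Python-${V}.tar.xz"
wget -O Python-${V}.tar.xz.asc "https://www.python.org/ftp/python/${V}/Python-${V}.tar.xz.asc"
# gpg exits non-zero on a bad signature, which aborts the image build.
gpg --batch --verify Python-${V}.tar.xz.asc Python-${V}.tar.xz
tar -xJf Python-${V}.tar.xz
```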
env_vars: { key: "TRAMPOLINE_IMAGE" - value: "gcr.io/cloud-devrel-kokoro-resources/python-multi" + value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs" } env_vars: { key: "TRAMPOLINE_BUILD_FILE" @@ -28,6 +28,23 @@ env_vars: { value: "docs-staging" } +env_vars: { + key: "V2_STAGING_BUCKET" + value: "docs-staging-v2" +} + +# It will upload the docker image after successful builds. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "true" +} + +# It will always build the docker image. +env_vars: { + key: "TRAMPOLINE_DOCKERFILE" + value: ".kokoro/docker/docs/Dockerfile" +} + # Fetch the token needed for reporting release status to GitHub before_action { fetch_keystore { diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg new file mode 100644 index 00000000..11181078 --- /dev/null +++ b/.kokoro/docs/docs-presubmit.cfg @@ -0,0 +1,17 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "STAGING_BUCKET" + value: "gcloud-python-test" +} + +env_vars: { + key: "V2_STAGING_BUCKET" + value: "gcloud-python-test" +} + +# We only upload the image in the main `docs` build. +env_vars: { + key: "TRAMPOLINE_IMAGE_UPLOAD" + value: "false" +} diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh new file mode 100755 index 00000000..f5251425 --- /dev/null +++ b/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? == 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 6877cdd5..8acb14e8 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -18,26 +18,16 @@ set -eo pipefail # Disable buffering, so that the logs stream through. 
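Aside: each iteration of the `populate-secrets.sh` loop above reduces to a single Secret Manager read. The equivalent call without the Docker wrapper, assuming a local `gcloud` with access to the project (`releasetool-publish-reporter-pem` is one of the keys listed later in `SECRET_MANAGER_KEYS`):

```bash
key="releasetool-publish-reporter-pem"
gcloud secrets versions access latest \
  --project cloud-devrel-kokoro-resources \
  --secret "${key}" > "${KOKORO_GFILE_DIR}/secret_manager/${key}"
```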
export PYTHONUNBUFFERED=1 -cd github/python-cloudbuild - -# Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +export PATH="${HOME}/.local/bin:${PATH}" # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --user --upgrade --quiet nox +python3 -m nox --version # build docs nox -s docs -python3 -m pip install gcp-docuploader - -# install a json parser -sudo apt-get update -sudo apt-get -y install software-properties-common -sudo add-apt-repository universe -sudo apt-get update -sudo apt-get -y install jq +python3 -m pip install --user gcp-docuploader # create metadata python3 -m docuploader create-metadata \ @@ -52,4 +42,23 @@ python3 -m docuploader create-metadata \ cat docs.metadata # upload docs -python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging +python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}" + + +# docfx yaml files +nox -s docfx + +# create metadata. +python3 -m docuploader create-metadata \ + --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \ + --version=$(python3 setup.py --version) \ + --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \ + --distribution-name=$(python3 setup.py --name) \ + --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \ + --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \ + --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json) + +cat docs.metadata + +# upload docs +python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}" diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index dfcae22e..41909982 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,42 +23,18 @@ env_vars: { value: "github/python-cloudbuild/.kokoro/release.sh" } -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } } -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } -} +# Tokens needed to report release status back to GitHub +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg index 335f2379..b73a217c 100644 --- a/.kokoro/samples/python3.6/common.cfg +++ b/.kokoro/samples/python3.6/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.6" } +# Declare build specific Cloud project. 
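Aside: the reworked `publish-docs.sh` above now uploads twice, HTML for the v1 pipeline and DocFX YAML for v2. Condensed, with the `create-metadata` steps omitted and bucket names as set in `.kokoro/docs/common.cfg`:

```bash
nox -s docs docfx   # build both output trees
python3 -m docuploader upload docs/_build/html \
  --metadata-file docs.metadata --staging-bucket docs-staging
python3 -m docuploader upload docs/_build/html/docfx_yaml \
  --metadata-file docs.metadata --destination-prefix docfx --staging-bucket docs-staging-v2
```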
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py36"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-cloudbuild/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index a9fec387..36ba440c 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.7"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py37"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-cloudbuild/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index eb2f3c25..5a0de211 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
     value: "py-3.8"
 }
 
+# Declare build specific Cloud project.
+env_vars: {
+    key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+    value: "python-docs-samples-tests-py38"
+}
+
 env_vars: {
     key: "TRAMPOLINE_BUILD_FILE"
     value: "github/python-cloudbuild/.kokoro/test-samples.sh"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index e560d138..d42f5129 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -28,6 +28,12 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
     git checkout $LATEST_RELEASE
 fi
 
+# Exit early if the samples directory doesn't exist.
+if [ ! -d "./samples" ]; then
+  echo "No tests run. './samples' not found."
+  exit 0
+fi
+
 # Disable buffering, so that the logs stream through.
 export PYTHONUNBUFFERED=1
 
@@ -101,4 +107,4 @@ cd "$ROOT"
 # Workaround for Kokoro permissions issue: delete secrets
 rm testing/{test-env.sh,client-secrets.json,service-account.json}
 
-exit "$RTN"
\ No newline at end of file
+exit "$RTN"
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index e8c4251f..f39236e9 100755
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -15,9 +15,14 @@
 
 set -eo pipefail
 
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$?
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+  chmod +x "${KOKORO_GFILE_DIR}/trampoline_cleanup.sh"
+  "${KOKORO_GFILE_DIR}/trampoline_cleanup.sh"
+  echo "cleanup"
+}
+trap cleanup EXIT
 
-chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
-${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
-
-exit ${ret_code}
+"$(dirname "$0")/populate-secrets.sh" # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
new file mode 100755
index 00000000..719bcd5b
--- /dev/null
+++ b/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things.
+#
+# 1. Prepare the Docker image for the test
+# 2.
Run the Docker with appropriate flags to run the test +# 3. Upload the newly built Docker image +# +# in a way that is somewhat compatible with trampoline_v1. +# +# To run this script, first download few files from gcs to /dev/shm. +# (/dev/shm is passed into the container as KOKORO_GFILE_DIR). +# +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm +# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm +# +# Then run the script. +# .kokoro/trampoline_v2.sh +# +# These environment variables are required: +# TRAMPOLINE_IMAGE: The docker image to use. +# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile. +# +# You can optionally change these environment variables: +# TRAMPOLINE_IMAGE_UPLOAD: +# (true|false): Whether to upload the Docker image after the +# successful builds. +# TRAMPOLINE_BUILD_FILE: The script to run in the docker container. +# TRAMPOLINE_WORKSPACE: The workspace path in the docker container. +# Defaults to /workspace. +# Potentially there are some repo specific envvars in .trampolinerc in +# the project root. + + +set -euo pipefail + +TRAMPOLINE_VERSION="2.0.5" + +if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then + readonly IO_COLOR_RED="$(tput setaf 1)" + readonly IO_COLOR_GREEN="$(tput setaf 2)" + readonly IO_COLOR_YELLOW="$(tput setaf 3)" + readonly IO_COLOR_RESET="$(tput sgr0)" +else + readonly IO_COLOR_RED="" + readonly IO_COLOR_GREEN="" + readonly IO_COLOR_YELLOW="" + readonly IO_COLOR_RESET="" +fi + +function function_exists { + [ $(LC_ALL=C type -t $1)"" == "function" ] +} + +# Logs a message using the given color. The first argument must be one +# of the IO_COLOR_* variables defined above, such as +# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the +# given color. The log message will also have an RFC-3339 timestamp +# prepended (in UTC). You can disable the color output by setting +# TERM=vt100. +function log_impl() { + local color="$1" + shift + local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")" + echo "================================================================" + echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}" + echo "================================================================" +} + +# Logs the given message with normal coloring and a timestamp. +function log() { + log_impl "${IO_COLOR_RESET}" "$@" +} + +# Logs the given message in green with a timestamp. +function log_green() { + log_impl "${IO_COLOR_GREEN}" "$@" +} + +# Logs the given message in yellow with a timestamp. +function log_yellow() { + log_impl "${IO_COLOR_YELLOW}" "$@" +} + +# Logs the given message in red with a timestamp. +function log_red() { + log_impl "${IO_COLOR_RED}" "$@" +} + +readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX) +readonly tmphome="${tmpdir}/h" +mkdir -p "${tmphome}" + +function cleanup() { + rm -rf "${tmpdir}" +} +trap cleanup EXIT + +RUNNING_IN_CI="${RUNNING_IN_CI:-false}" + +# The workspace in the container, defaults to /workspace. +TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}" + +pass_down_envvars=( + # TRAMPOLINE_V2 variables. + # Tells scripts whether they are running as part of CI or not. + "RUNNING_IN_CI" + # Indicates which CI system we're in. + "TRAMPOLINE_CI" + # Indicates the version of the script. + "TRAMPOLINE_VERSION" +) + +log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}" + +# Detect which CI systems we're in. 
If we're in any of the CI systems +# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be +# the name of the CI system. Both envvars will be passing down to the +# container for telling which CI system we're in. +if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then + # descriptive env var for indicating it's on CI. + RUNNING_IN_CI="true" + TRAMPOLINE_CI="kokoro" + if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then + if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then + log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting." + exit 1 + fi + # This service account will be activated later. + TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" + else + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + gcloud auth list + fi + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet + fi + pass_down_envvars+=( + # KOKORO dynamic variables. + "KOKORO_BUILD_NUMBER" + "KOKORO_BUILD_ID" + "KOKORO_JOB_NAME" + "KOKORO_GIT_COMMIT" + "KOKORO_GITHUB_COMMIT" + "KOKORO_GITHUB_PULL_REQUEST_NUMBER" + "KOKORO_GITHUB_PULL_REQUEST_COMMIT" + # For Build Cop Bot + "KOKORO_GITHUB_COMMIT_URL" + "KOKORO_GITHUB_PULL_REQUEST_URL" + ) +elif [[ "${TRAVIS:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="travis" + pass_down_envvars+=( + "TRAVIS_BRANCH" + "TRAVIS_BUILD_ID" + "TRAVIS_BUILD_NUMBER" + "TRAVIS_BUILD_WEB_URL" + "TRAVIS_COMMIT" + "TRAVIS_COMMIT_MESSAGE" + "TRAVIS_COMMIT_RANGE" + "TRAVIS_JOB_NAME" + "TRAVIS_JOB_NUMBER" + "TRAVIS_JOB_WEB_URL" + "TRAVIS_PULL_REQUEST" + "TRAVIS_PULL_REQUEST_BRANCH" + "TRAVIS_PULL_REQUEST_SHA" + "TRAVIS_PULL_REQUEST_SLUG" + "TRAVIS_REPO_SLUG" + "TRAVIS_SECURE_ENV_VARS" + "TRAVIS_TAG" + ) +elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="github-workflow" + pass_down_envvars+=( + "GITHUB_WORKFLOW" + "GITHUB_RUN_ID" + "GITHUB_RUN_NUMBER" + "GITHUB_ACTION" + "GITHUB_ACTIONS" + "GITHUB_ACTOR" + "GITHUB_REPOSITORY" + "GITHUB_EVENT_NAME" + "GITHUB_EVENT_PATH" + "GITHUB_SHA" + "GITHUB_REF" + "GITHUB_HEAD_REF" + "GITHUB_BASE_REF" + ) +elif [[ "${CIRCLECI:-}" == "true" ]]; then + RUNNING_IN_CI="true" + TRAMPOLINE_CI="circleci" + pass_down_envvars+=( + "CIRCLE_BRANCH" + "CIRCLE_BUILD_NUM" + "CIRCLE_BUILD_URL" + "CIRCLE_COMPARE_URL" + "CIRCLE_JOB" + "CIRCLE_NODE_INDEX" + "CIRCLE_NODE_TOTAL" + "CIRCLE_PREVIOUS_BUILD_NUM" + "CIRCLE_PROJECT_REPONAME" + "CIRCLE_PROJECT_USERNAME" + "CIRCLE_REPOSITORY_URL" + "CIRCLE_SHA1" + "CIRCLE_STAGE" + "CIRCLE_USERNAME" + "CIRCLE_WORKFLOW_ID" + "CIRCLE_WORKFLOW_JOB_ID" + "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS" + "CIRCLE_WORKFLOW_WORKSPACE_ID" + ) +fi + +# Configure the service account for pulling the docker image. +function repo_root() { + local dir="$1" + while [[ ! -d "${dir}/.git" ]]; do + dir="$(dirname "$dir")" + done + echo "${dir}" +} + +# Detect the project root. In CI builds, we assume the script is in +# the git tree and traverse from there, otherwise, traverse from `pwd` +# to find `.git` directory. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + PROGRAM_PATH="$(realpath "$0")" + PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")" + PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")" +else + PROJECT_ROOT="$(repo_root $(pwd))" +fi + +log_yellow "Changing to the project root: ${PROJECT_ROOT}." 
+cd "${PROJECT_ROOT}" + +# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need +# to use this environment variable in `PROJECT_ROOT`. +if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then + + mkdir -p "${tmpdir}/gcloud" + gcloud_config_dir="${tmpdir}/gcloud" + + log_yellow "Using isolated gcloud config: ${gcloud_config_dir}." + export CLOUDSDK_CONFIG="${gcloud_config_dir}" + + log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication." + gcloud auth activate-service-account \ + --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}" + log_yellow "Configuring Container Registry access" + gcloud auth configure-docker --quiet +fi + +required_envvars=( + # The basic trampoline configurations. + "TRAMPOLINE_IMAGE" + "TRAMPOLINE_BUILD_FILE" +) + +if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then + source "${PROJECT_ROOT}/.trampolinerc" +fi + +log_yellow "Checking environment variables." +for e in "${required_envvars[@]}" +do + if [[ -z "${!e:-}" ]]; then + log "Missing ${e} env var. Aborting." + exit 1 + fi +done + +# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1 +# script: e.g. "github/repo-name/.kokoro/run_tests.sh" +TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}" +log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}" + +# ignore error on docker operations and test execution +set +e + +log_yellow "Preparing Docker image." +# We only download the docker image in CI builds. +if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + # Download the docker image specified by `TRAMPOLINE_IMAGE` + + # We may want to add --max-concurrent-downloads flag. + + log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}." + if docker pull "${TRAMPOLINE_IMAGE}"; then + log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="true" + else + log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}." + has_image="false" + fi +else + # For local run, check if we have the image. + if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then + has_image="true" + else + has_image="false" + fi +fi + + +# The default user for a Docker container has uid 0 (root). To avoid +# creating root-owned files in the build directory we tell docker to +# use the current user ID. +user_uid="$(id -u)" +user_gid="$(id -g)" +user_name="$(id -un)" + +# To allow docker in docker, we add the user to the docker group in +# the host os. +docker_gid=$(cut -d: -f3 < <(getent group docker)) + +update_cache="false" +if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then + # Build the Docker image from the source. + context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}") + docker_build_flags=( + "-f" "${TRAMPOLINE_DOCKERFILE}" + "-t" "${TRAMPOLINE_IMAGE}" + "--build-arg" "UID=${user_uid}" + "--build-arg" "USERNAME=${user_name}" + ) + if [[ "${has_image}" == "true" ]]; then + docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}") + fi + + log_yellow "Start building the docker image." + if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then + echo "docker build" "${docker_build_flags[@]}" "${context_dir}" + fi + + # ON CI systems, we want to suppress docker build logs, only + # output the logs when it fails. + if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then + if docker build "${docker_build_flags[@]}" "${context_dir}" \ + > "${tmpdir}/docker_build.log" 2>&1; then + if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then + cat "${tmpdir}/docker_build.log" + fi + + log_green "Finished building the docker image." 
+            update_cache="true"
+        else
+            log_red "Failed to build the Docker image, aborting."
+            log_yellow "Dumping the build logs:"
+            cat "${tmpdir}/docker_build.log"
+            exit 1
+        fi
+    else
+        if docker build "${docker_build_flags[@]}" "${context_dir}"; then
+            log_green "Finished building the docker image."
+            update_cache="true"
+        else
+            log_red "Failed to build the Docker image, aborting."
+            exit 1
+        fi
+    fi
+else
+    if [[ "${has_image}" != "true" ]]; then
+        log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+        exit 1
+    fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+    # Remove the container after it exits.
+    "--rm"
+
+    # Use the host network.
+    "--network=host"
+
+    # Run in privileged mode. We are not using docker for sandboxing or
+    # isolation, just for packaging our dev tools.
+    "--privileged"
+
+    # Run the docker script with the user id. Because the docker image gets to
+    # write in ${PWD} you typically want this to be your user id.
+    # To allow docker in docker, we need to use docker gid on the host.
+    "--user" "${user_uid}:${docker_gid}"
+
+    # Pass down the USER.
+    "--env" "USER=${user_name}"
+
+    # Mount the project directory inside the Docker container.
+    "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+    "--workdir" "${TRAMPOLINE_WORKSPACE}"
+    "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+    # Mount the temporary home directory.
+    "--volume" "${tmphome}:/h"
+    "--env" "HOME=/h"
+
+    # Allow docker in docker.
+    "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+    # Mount the /tmp so that docker in docker can mount the files
+    # there correctly.
+    "--volume" "/tmp:/tmp"
+    # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+    # TODO(tmatsuo): This part is not portable.
+    "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+    "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+    "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+    "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+    "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+    docker_flags+=("-it")
+fi
+
+# Pass down env vars.
+for e in "${pass_down_envvars[@]}"
+do
+    if [[ -n "${!e:-}" ]]; then
+        docker_flags+=("--env" "${e}=${!e}")
+    fi
+done
+
+# If arguments are given, all arguments will become the commands run
+# in the container, otherwise run TRAMPOLINE_BUILD_FILE.
+if [[ $# -ge 1 ]]; then
+    log_yellow "Running the given commands '" "${@:1}" "' in the container."
+    readonly commands=("${@:1}")
+    if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+        echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+    fi
+    docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+    log_yellow "Running the tests in a Docker container."
+    docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+    if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+        echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+    fi
+    docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
+
+if [[ ${test_retval} -eq 0 ]]; then
+    log_green "Build finished with ${test_retval}"
+else
+    log_red "Build finished with ${test_retval}"
+fi
+
+# Only upload it when the test passes.
+if [[ "${update_cache}" == "true" ]] && \
+       [[ $test_retval == 0 ]] && \
+       [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
+    log_yellow "Uploading the Docker image."
+    if docker push "${TRAMPOLINE_IMAGE}"; then
+        log_green "Finished uploading the Docker image."
+ else + log_red "Failed uploading the Docker image." + fi + # Call trampoline_after_upload_hook if it's defined. + if function_exists trampoline_after_upload_hook; then + trampoline_after_upload_hook + fi + +fi + +exit "${test_retval}" diff --git a/.trampolinerc b/.trampolinerc new file mode 100644 index 00000000..995ee291 --- /dev/null +++ b/.trampolinerc @@ -0,0 +1,51 @@ +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Template for .trampolinerc + +# Add required env vars here. +required_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Add env vars which are passed down into the container here. +pass_down_envvars+=( + "STAGING_BUCKET" + "V2_STAGING_BUCKET" +) + +# Prevent unintentional override on the default image. +if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \ + [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image." + exit 1 +fi + +# Define the default value if it makes sense. +if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then + TRAMPOLINE_IMAGE_UPLOAD="" +fi + +if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then + TRAMPOLINE_IMAGE="" +fi + +if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then + TRAMPOLINE_DOCKERFILE="" +fi + +if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then + TRAMPOLINE_BUILD_FILE="" +fi diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index b3d1f602..039f4368 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,44 +1,95 @@ -# Contributor Code of Conduct +# Code of Conduct -As contributors and maintainers of this project, -and in the interest of fostering an open and welcoming community, -we pledge to respect all people who contribute through reporting issues, -posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. +## Our Pledge -We are committed to making participation in this project -a harassment-free experience for everyone, -regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, -body size, race, ethnicity, age, religion, or nationality. +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. 
+ +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. 
However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 7566ac72..1f4f33e6 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests. .. nox: https://pypi.org/project/nox/ -Note on Editable Installs / Develop Mode -======================================== - -- As mentioned previously, using ``setuptools`` in `develop mode`_ - or a ``pip`` `editable install`_ is not possible with this - library. This is because this library uses `namespace packages`_. - For context see `Issue #2316`_ and the relevant `PyPA issue`_. - - Since ``editable`` / ``develop`` mode can't be used, packages - need to be installed directly. Hence your changes to the source - tree don't get incorporated into the **already installed** - package. - -.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ -.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 -.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 -.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode -.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs - ***************************************** I'm getting weird errors... Can you help? ***************************************** diff --git a/docs/cloudbuild_v1/types.rst b/docs/cloudbuild_v1/types.rst index efe8ce72..3fcd1832 100644 --- a/docs/cloudbuild_v1/types.rst +++ b/docs/cloudbuild_v1/types.rst @@ -3,3 +3,4 @@ Types for Google Devtools Cloudbuild v1 API .. 
automodule:: google.cloud.devtools.cloudbuild_v1.types :members: + :show-inheritance: diff --git a/docs/conf.py b/docs/conf.py index 2d0ebebb..4a604d68 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -20,12 +20,16 @@ # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.insert(0, os.path.abspath("..")) +# For plugins that can not read conf.py. +# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85 +sys.path.insert(0, os.path.abspath(".")) + __version__ = "" # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "1.5.5" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -35,6 +39,7 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", @@ -90,7 +95,12 @@ # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ["_build"] +exclude_patterns = [ + "_build", + "samples/AUTHORING_GUIDE.md", + "samples/CONTRIBUTING.md", + "samples/snippets/README.rst", +] # The reST default role (used for this markup: `text`) to use for all # documents. @@ -339,6 +349,7 @@ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto b/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto index 1f06623f..de8a1de3 100644 --- a/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto +++ b/google/cloud/devtools/cloudbuild_v1/proto/cloudbuild.proto @@ -19,9 +19,11 @@ package google.devtools.cloudbuild.v1; import "google/api/annotations.proto"; import "google/api/client.proto"; import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; import "google/longrunning/operations.proto"; import "google/protobuf/duration.proto"; import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; import "google/protobuf/timestamp.proto"; option go_package = "google.golang.org/genproto/googleapis/devtools/cloudbuild/v1;cloudbuild"; @@ -29,6 +31,14 @@ option java_multiple_files = true; option java_package = "com.google.cloudbuild.v1"; option objc_class_prefix = "GCB"; option ruby_package = "Google::Cloud::Build::V1"; +option (google.api.resource_definition) = { + type: "compute.googleapis.com/Network" + pattern: "projects/{project}/global/networks/{network}" +}; +option (google.api.resource_definition) = { + type: "iam.googleapis.com/ServiceAccount" + pattern: "projects/{project}/serviceAccounts/{service_account}" +}; // Creates and manages builds on Google Cloud Platform. 
// @@ -52,6 +62,10 @@ service CloudBuild { option (google.api.http) = { post: "/v1/projects/{project_id}/builds" body: "build" + additional_bindings { + post: "/v1/{parent=projects/*/locations/*}/builds" + body: "*" + } }; option (google.api.method_signature) = "project_id,build"; option (google.longrunning.operation_info) = { @@ -67,6 +81,7 @@ service CloudBuild { rpc GetBuild(GetBuildRequest) returns (Build) { option (google.api.http) = { get: "/v1/projects/{project_id}/builds/{id}" + additional_bindings { get: "/v1/{name=projects/*/locations/*/builds/*}" } }; option (google.api.method_signature) = "project_id,id"; } @@ -78,6 +93,7 @@ service CloudBuild { rpc ListBuilds(ListBuildsRequest) returns (ListBuildsResponse) { option (google.api.http) = { get: "/v1/projects/{project_id}/builds" + additional_bindings { get: "/v1/{parent=projects/*/locations/*}/builds" } }; option (google.api.method_signature) = "project_id,filter"; } @@ -87,6 +103,10 @@ service CloudBuild { option (google.api.http) = { post: "/v1/projects/{project_id}/builds/{id}:cancel" body: "*" + additional_bindings { + post: "/v1/{name=projects/*/locations/*/builds/*}:cancel" + body: "*" + } }; option (google.api.method_signature) = "project_id,id"; } @@ -122,6 +142,10 @@ service CloudBuild { option (google.api.http) = { post: "/v1/projects/{project_id}/builds/{id}:retry" body: "*" + additional_bindings { + post: "/v1/{name=projects/*/locations/*/builds/*}:retry" + body: "*" + } }; option (google.api.method_signature) = "project_id,id"; option (google.longrunning.operation_info) = { @@ -228,6 +252,12 @@ service CloudBuild { // Specifies a build to retry. message RetryBuildRequest { + // The name of the `Build` to retry. + // Format: `projects/{project}/locations/{location}/builds/{build}` + string name = 3 [(google.api.resource_reference) = { + type: "cloudbuild.googleapis.com/Build" + }]; + // Required. ID of the project. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; @@ -416,7 +446,7 @@ message BuildStep { // Output only. Stores timing information for pulling this build step's // builder image only. - TimeSpan pull_timing = 13; + TimeSpan pull_timing = 13 [(google.api.field_behavior) = OUTPUT_ONLY]; // Time limit for executing this build step. If not defined, the step has no // time limit and will be allowed to continue to run until either it completes @@ -426,7 +456,7 @@ message BuildStep { // Output only. Status of the build step. At this time, build step status is // only updated on build completion; step status is not updated in real-time // as the build progresses. - Build.Status status = 12; + Build.Status status = 12 [(google.api.field_behavior) = OUTPUT_ONLY]; } // Volume describes a Docker container volume which is mounted into build steps @@ -502,6 +532,12 @@ message ArtifactResult { // resolved from the specified branch or tag. // - $SHORT_SHA: first 7 characters of $REVISION_ID or $COMMIT_SHA. message Build { + option (google.api.resource) = { + type: "cloudbuild.googleapis.com/Build" + pattern: "projects/{project}/builds/{build}" + pattern: "projects/{project}/locations/{location}/builds/{build}" + }; + // Possible status of a build or build step. enum Status { // Status of the build is unknown. @@ -532,6 +568,11 @@ message Build { EXPIRED = 9; } + // Output only. The 'Build' name with format: + // `projects/{project}/locations/{location}/builds/{build}`, where {build} + // is a unique identifier generated by the service. 
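Aside: with the `additional_bindings` added above, a build can be addressed by its full resource name over REST as well as by project ID. A hypothetical regional `GetBuild` call (project, location, and build IDs are placeholders):

```bash
curl -H "Authorization: Bearer $(gcloud auth print-access-token)" \
  "https://cloudbuild.googleapis.com/v1/projects/my-project/locations/us-central1/builds/my-build-id"
```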
+ string name = 45 [(google.api.field_behavior) = OUTPUT_ONLY]; + // Output only. Unique identifier of the build. string id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; @@ -539,10 +580,10 @@ message Build { string project_id = 16 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Status of the build. - Status status = 2; + Status status = 2 [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. Customer-readable message about the current status. - string status_detail = 24; + string status_detail = 24 [(google.api.field_behavior) = OUTPUT_ONLY]; // The location of the source files to build. Source source = 3; @@ -572,6 +613,8 @@ message Build { // granularity. If this amount of time elapses, work on the build will cease // and the build status will be `TIMEOUT`. // + // `timeout` starts ticking from `startTime`. + // // Default time is ten minutes. google.protobuf.Duration timeout = 12; @@ -605,11 +648,12 @@ message Build { string logs_bucket = 19; // Output only. A permanent fixed identifier for source. - SourceProvenance source_provenance = 21; + SourceProvenance source_provenance = 21 + [(google.api.field_behavior) = OUTPUT_ONLY]; // Output only. The ID of the `BuildTrigger` that triggered this build, if it // was triggered automatically. - string build_trigger_id = 22; + string build_trigger_id = 22 [(google.api.field_behavior) = OUTPUT_ONLY]; // Special options for this build. BuildOptions options = 23; @@ -636,6 +680,15 @@ message Build { // If the build does not specify source or images, // these keys will not be included. map timing = 33 [(google.api.field_behavior) = OUTPUT_ONLY]; + + // IAM service account whose credentials will be used at build runtime. + // Must be of the format `projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}`. + // ACCOUNT can be email address or uniqueId of the service account. + // + // This field is in alpha and is not publicly available. + string service_account = 42 [(google.api.resource_reference) = { + type: "iam.googleapis.com/ServiceAccount" + }]; } // Artifacts produced by a build that should be uploaded upon @@ -656,7 +709,7 @@ message Artifacts { repeated string paths = 2; // Output only. Stores timing information for pushing all artifact objects. - TimeSpan timing = 3; + TimeSpan timing = 3 [(google.api.field_behavior) = OUTPUT_ONLY]; } // A list of images to be pushed upon the successful completion of all build @@ -769,6 +822,12 @@ message Secret { // Request to create a new build. message CreateBuildRequest { + // The parent resource where this build will be created. + // Format: `projects/{project}/locations/{location}` + string parent = 4 [(google.api.resource_reference) = { + child_type: "cloudbuild.googleapis.com/Build" + }]; + // Required. ID of the project. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; @@ -778,6 +837,12 @@ message CreateBuildRequest { // Request to get a build. message GetBuildRequest { + // The name of the `Build` to retrieve. + // Format: `projects/{project}/locations/{location}/builds/{build}` + string name = 4 [(google.api.resource_reference) = { + type: "cloudbuild.googleapis.com/Build" + }]; + // Required. ID of the project. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; @@ -787,6 +852,12 @@ message GetBuildRequest { // Request to list builds. message ListBuildsRequest { + // The parent of the collection of `Builds`. 
+ // Format: `projects/{project}/locations/location` + string parent = 9 [(google.api.resource_reference) = { + child_type: "cloudbuild.googleapis.com/Build" + }]; + // Required. ID of the project. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; @@ -811,6 +882,12 @@ message ListBuildsResponse { // Request to cancel an ongoing build. message CancelBuildRequest { + // The name of the `Build` to retrieve. + // Format: `projects/{project}/locations/{location}/builds/{build}` + string name = 4 [(google.api.resource_reference) = { + type: "cloudbuild.googleapis.com/Build" + }]; + // Required. ID of the project. string project_id = 1 [(google.api.field_behavior) = REQUIRED]; @@ -821,6 +898,11 @@ message CancelBuildRequest { // Configuration for an automated build in response to source repository // changes. message BuildTrigger { + option (google.api.resource) = { + type: "cloudbuild.googleapis.com/BuildTrigger" + pattern: "projects/{project}/triggers/{trigger}" + }; + // Output only. Unique identifier of the trigger. string id = 1 [(google.api.field_behavior) = OUTPUT_ONLY]; @@ -867,12 +949,11 @@ message BuildTrigger { google.protobuf.Timestamp create_time = 5 [(google.api.field_behavior) = OUTPUT_ONLY]; - // If true, the trigger will never result in a build. + // If true, the trigger will never automatically execute a build. bool disabled = 9; // Substitutions for Build resource. The keys must match the following - // regular expression: `^_[A-Z0-9_]+$`.The keys cannot conflict with the - // keys in bindings. + // regular expression: `^_[A-Z0-9_]+$`. map substitutions = 11; // ignored_files and included_files are file glob matches using @@ -948,8 +1029,8 @@ message PullRequestFilter { string branch = 2; } - // Whether to block builds on a "/gcbrun" comment from a repository admin or - // collaborator. + // Configure builds to run whether a repository owner or collaborator need to + // comment `/gcbrun`. CommentControl comment_control = 5; // If true, branches that do NOT match the git_ref will trigger a build. @@ -1050,7 +1131,9 @@ message BuildOptions { VERIFIED = 1; } - // Supported VM sizes. + // Supported Compute Engine machine types. + // For more information, see [Machine + // types](https://cloud.google.com/compute/docs/machine-types). enum MachineType { // Standard machine type. UNSPECIFIED = 0; @@ -1091,11 +1174,23 @@ message BuildOptions { // rely on the default logging behavior as it may change in the future. LOGGING_UNSPECIFIED = 0; - // Stackdriver logging and Cloud Storage logging are enabled. + // Cloud Logging and Cloud Storage logging are enabled. LEGACY = 1; // Only Cloud Storage logging is enabled. GCS_ONLY = 2; + + // This option is the same as CLOUD_LOGGING_ONLY. + STACKDRIVER_ONLY = 3 [deprecated = true]; + + // Only Cloud Logging is enabled. Note that logs for both the Cloud Console + // UI and Cloud SDK are based on Cloud Storage logs, so neither will provide + // logs if this option is chosen. + CLOUD_LOGGING_ONLY = 5; + + // Turn off all logging. No build logs will be captured. + // Next ID: 6 + NONE = 4; } // Requested hash for SourceProvenance. @@ -1117,20 +1212,30 @@ message BuildOptions { // Option to specify behavior when there is an error in the substitution // checks. + // + // NOTE: this is always set to ALLOW_LOOSE for triggered builds and cannot + // be overridden in the build configuration file. SubstitutionOption substitution_option = 4; + // Option to specify whether or not to apply bash style string + // operations to the substitutions. 
+ // + // NOTE: this is always enabled for triggered builds and cannot be + // overridden in the build configuration file. + bool dynamic_substitutions = 17; + // Option to define build log streaming behavior to Google Cloud // Storage. LogStreamingOption log_streaming_option = 5; // Option to specify a `WorkerPool` for the build. - // Format: projects/{project}/workerPools/{workerPool} + // Format: projects/{project}/locations/{location}/workerPools/{workerPool} // // This field is experimental. string worker_pool = 7; - // Option to specify the logging mode, which determines where the logs are - // stored. + // Option to specify the logging mode, which determines if and where build + // logs are stored. LoggingMode logging = 11; // A list of global environment variable definitions that will exist for all diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py index 96421e96..469f1edf 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/async_client.py @@ -28,14 +28,14 @@ from google.auth import credentials # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation -from google.api_core import operation_async +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers from google.cloud.devtools.cloudbuild_v1.types import cloudbuild from google.protobuf import duration_pb2 as duration # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from .transports.base import CloudBuildTransport +from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport from .client import CloudBuildClient @@ -56,9 +56,50 @@ class CloudBuildAsyncClient: DEFAULT_ENDPOINT = CloudBuildClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = CloudBuildClient.DEFAULT_MTLS_ENDPOINT + build_path = staticmethod(CloudBuildClient.build_path) + parse_build_path = staticmethod(CloudBuildClient.parse_build_path) + build_trigger_path = staticmethod(CloudBuildClient.build_trigger_path) + parse_build_trigger_path = staticmethod(CloudBuildClient.parse_build_trigger_path) + service_account_path = staticmethod(CloudBuildClient.service_account_path) + parse_service_account_path = staticmethod( + CloudBuildClient.parse_service_account_path + ) + + common_billing_account_path = staticmethod( + CloudBuildClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + CloudBuildClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(CloudBuildClient.common_folder_path) + parse_common_folder_path = staticmethod(CloudBuildClient.parse_common_folder_path) + + common_organization_path = staticmethod(CloudBuildClient.common_organization_path) + parse_common_organization_path = staticmethod( + CloudBuildClient.parse_common_organization_path + ) + + common_project_path = staticmethod(CloudBuildClient.common_project_path) + parse_common_project_path = staticmethod(CloudBuildClient.parse_common_project_path) + + common_location_path = staticmethod(CloudBuildClient.common_location_path) + parse_common_location_path = staticmethod( + CloudBuildClient.parse_common_location_path + ) + from_service_account_file = 
CloudBuildClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> CloudBuildTransport: + """Return the transport used by the client instance. + + Returns: + CloudBuildTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(CloudBuildClient).get_transport_class, type(CloudBuildClient) ) @@ -69,6 +110,7 @@ def __init__( credentials: credentials.Credentials = None, transport: Union[str, CloudBuildTransport] = "grpc_asyncio", client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the cloud build client. @@ -84,16 +126,19 @@ def __init__( client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -101,7 +146,10 @@ def __init__( """ self._client = CloudBuildClient( - credentials=credentials, transport=transport, client_options=client_options, + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, ) async def create_build( @@ -173,7 +221,8 @@ async def create_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, build]): + has_flattened_params = any([project_id, build]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -194,7 +243,7 @@ async def create_build( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_build, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -276,7 +325,8 @@ async def get_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([project_id, id]): + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -301,11 +351,11 @@ async def get_build( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -360,7 +410,8 @@ async def list_builds( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, filter]): + has_flattened_params = any([project_id, filter]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -385,11 +436,11 @@ async def list_builds( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -465,7 +516,8 @@ async def cancel_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, id]): + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -486,7 +538,7 @@ async def cancel_build( rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_build, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -590,7 +642,8 @@ async def retry_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, id]): + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -611,7 +664,7 @@ async def retry_build( rpc = gapic_v1.method_async.wrap_method( self._client._transport.retry_build, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -674,7 +727,8 @@ async def create_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger]): + has_flattened_params = any([project_id, trigger]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
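The `has_flattened_params` guard repeated in the hunks above enforces the GAPIC calling convention: each RPC method accepts either a prebuilt request object or the flattened keyword fields, but never both. A minimal sketch of the two styles, assuming Application Default Credentials are configured and using placeholder project and build IDs:

    from google.cloud.devtools import cloudbuild_v1

    client = cloudbuild_v1.CloudBuildClient()

    # Style 1: pass a fully formed request object.
    request = cloudbuild_v1.GetBuildRequest(project_id="my-project", id="build-id")
    build = client.get_build(request=request)

    # Style 2: pass the flattened fields instead.
    build = client.get_build(project_id="my-project", id="build-id")

    # Passing both `request` and a flattened field raises the ValueError shown above.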
@@ -695,7 +749,7 @@ async def create_build_trigger( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_build_trigger, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -751,7 +805,8 @@ async def get_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id]): + has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -776,11 +831,11 @@ async def get_build_trigger( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -830,7 +885,8 @@ async def list_build_triggers( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id]): + has_flattened_params = any([project_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -853,11 +909,11 @@ async def list_build_triggers( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -910,7 +966,8 @@ async def delete_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id]): + has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -935,11 +992,11 @@ async def delete_build_trigger( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -999,7 +1056,8 @@ async def update_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id, trigger]): + has_flattened_params = any([project_id, trigger_id, trigger]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
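The retry hunks above and below only swap the argument order passed to `retries.if_exception_type`; the helper returns a predicate that matches any of the listed exception classes, so the reordering is purely cosmetic. A small sketch of how such a predicate behaves, assuming only that `google-api-core` is installed:

    from google.api_core import exceptions
    from google.api_core import retry as retries

    # Build the same predicate the wrapped methods use.
    predicate = retries.if_exception_type(
        exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
    )

    assert predicate(exceptions.ServiceUnavailable("transient outage"))  # retried
    assert not predicate(exceptions.PermissionDenied("denied"))          # not retried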
@@ -1022,7 +1080,7 @@ async def update_build_trigger( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_build_trigger, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -1104,7 +1162,8 @@ async def run_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id, source]): + has_flattened_params = any([project_id, trigger_id, source]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1127,7 +1186,7 @@ async def run_build_trigger( rpc = gapic_v1.method_async.wrap_method( self._client._transport.run_build_trigger, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -1193,7 +1252,7 @@ async def create_worker_pool( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_worker_pool, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -1254,11 +1313,11 @@ async def get_worker_pool( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -1298,7 +1357,7 @@ async def delete_worker_pool( rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_worker_pool, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -1353,7 +1412,7 @@ async def update_worker_pool( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_worker_pool, default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -1401,11 +1460,11 @@ async def list_worker_pools( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), ), default_timeout=600.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. 
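Each of these `wrap_method` calls now receives `DEFAULT_CLIENT_INFO` instead of the module-private `_client_info` (the rename itself appears in the next hunk), and the client constructors gain a matching `client_info` parameter. A hedged sketch of overriding it, where the user-agent string is purely illustrative:

    from google.api_core.gapic_v1.client_info import ClientInfo
    from google.cloud.devtools import cloudbuild_v1

    # Attach an application-specific user agent to every request.
    info = ClientInfo(user_agent="my-sample-app/1.0")
    client = cloudbuild_v1.CloudBuildClient(client_info=info)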
@@ -1416,13 +1475,13 @@ async def list_worker_pools( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-devtools-cloudbuild", ).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("CloudBuildAsyncClient",) diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py index a89cd044..a301a515 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/client.py @@ -16,28 +16,30 @@ # from collections import OrderedDict +from distutils import util import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -from google.api_core import operation -from google.api_core import operation_async +from google.api_core import operation # type: ignore +from google.api_core import operation_async # type: ignore from google.cloud.devtools.cloudbuild_v1.services.cloud_build import pagers from google.cloud.devtools.cloudbuild_v1.types import cloudbuild from google.protobuf import duration_pb2 as duration # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from .transports.base import CloudBuildTransport +from .transports.base import CloudBuildTransport, DEFAULT_CLIENT_INFO from .transports.grpc import CloudBuildGrpcTransport from .transports.grpc_asyncio import CloudBuildGrpcAsyncIOTransport @@ -138,12 +140,121 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> CloudBuildTransport: + """Return the transport used by the client instance. + + Returns: + CloudBuildTransport: The transport used by the client instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def build_path(project: str, build: str,) -> str:
+        """Return a fully-qualified build string."""
+        return "projects/{project}/builds/{build}".format(project=project, build=build,)
+
+    @staticmethod
+    def parse_build_path(path: str) -> Dict[str, str]:
+        """Parse a build path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/builds/(?P<build>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def build_trigger_path(project: str, trigger: str,) -> str:
+        """Return a fully-qualified build_trigger string."""
+        return "projects/{project}/triggers/{trigger}".format(
+            project=project, trigger=trigger,
+        )
+
+    @staticmethod
+    def parse_build_trigger_path(path: str) -> Dict[str, str]:
+        """Parse a build_trigger path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/triggers/(?P<trigger>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def service_account_path(project: str, service_account: str,) -> str:
+        """Return a fully-qualified service_account string."""
+        return "projects/{project}/serviceAccounts/{service_account}".format(
+            project=project, service_account=service_account,
+        )
+
+    @staticmethod
+    def parse_service_account_path(path: str) -> Dict[str, str]:
+        """Parse a service_account path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/serviceAccounts/(?P<service_account>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str,) -> str:
+        """Return a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str,) -> str:
+        """Return a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder,)
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str,) -> str:
+        """Return a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization,)
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str,) -> str:
+        """Return a fully-qualified project string."""
+        return "projects/{project}".format(project=project,)
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str,) -> str:
+        """Return a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project, location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
     def __init__(
         self,
         *,
-        credentials: credentials.Credentials = None,
-        transport: Union[str, CloudBuildTransport] = None,
-        client_options: ClientOptions = None,
+        credentials: Optional[credentials.Credentials] = None,
+        transport: Union[str, CloudBuildTransport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
     ) -> None:
         """Instantiate the cloud build client.

@@ -156,48 +267,74 @@ def __init__(
             transport (Union[str, ~.CloudBuildTransport]): The
                 transport to use. If set to None, a transport is chosen
                 automatically.
-            client_options (ClientOptions): Custom options for the client. It
-                won't take effect if a ``transport`` instance is provided.
+            client_options (client_options_lib.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
                 (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                 environment variable can also be used to override the endpoint:
                 "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint, this is the default value for
-                the environment variable) and "auto" (auto switch to the default
-                mTLS endpoint if client SSL credentials is present). However,
-                the ``api_endpoint`` property takes precedence if provided.
-                (2) The ``client_cert_source`` property is used to provide client
-                SSL credentials for mutual TLS transport. If not provided, the
-                default SSL credentials will be used if present.
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value). However, the ``api_endpoint`` property takes
+                precedence if provided.
+                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+                is "true", then the ``client_cert_source`` property can be used
+                to provide client certificate for mutual TLS transport. If
+                not provided, the default SSL client certificate will be used if
+                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+                set, no client certificate will be used.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.

         Raises:
             google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                 creation failed for any reason.
         """
         if isinstance(client_options, dict):
-            client_options = ClientOptions.from_dict(client_options)
+            client_options = client_options_lib.from_dict(client_options)
         if client_options is None:
-            client_options = ClientOptions.ClientOptions()
+            client_options = client_options_lib.ClientOptions()

-        if client_options.api_endpoint is None:
-            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never")
+        # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": - client_options.api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": - client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - has_client_cert_source = ( - client_options.client_cert_source is not None - or mtls.has_default_client_cert_source() - ) - client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT - if has_client_cert_source - else self.DEFAULT_ENDPOINT + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" ) # Save or instantiate the transport. @@ -221,10 +358,11 @@ def __init__( self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, - host=client_options.api_endpoint, + host=api_endpoint, scopes=client_options.scopes, - api_mtls_endpoint=client_options.api_endpoint, - client_cert_source=client_options.client_cert_source, + ssl_channel_credentials=ssl_credentials, + quota_project_id=client_options.quota_project_id, + client_info=client_info, ) def create_build( @@ -296,29 +434,31 @@ def create_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, build]): + has_flattened_params = any([project_id, build]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.CreateBuildRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.CreateBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.CreateBuildRequest): + request = cloudbuild.CreateBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if build is not None: - request.build = build + if project_id is not None: + request.project_id = project_id + if build is not None: + request.build = build # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.create_build, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.create_build] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -399,37 +539,31 @@ def get_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, id]): + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.GetBuildRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.GetBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.GetBuildRequest): + request = cloudbuild.GetBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_build, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.get_build] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -483,37 +617,31 @@ def list_builds( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, filter]): + has_flattened_params = any([project_id, filter]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.ListBuildsRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.ListBuildsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.ListBuildsRequest): + request = cloudbuild.ListBuildsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if filter is not None: - request.filter = filter + if project_id is not None: + request.project_id = project_id + if filter is not None: + request.filter = filter # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.list_builds, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.list_builds] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -588,29 +716,31 @@ def cancel_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, id]): + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.CancelBuildRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.CancelBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.CancelBuildRequest): + request = cloudbuild.CancelBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.cancel_build, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.cancel_build] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -713,29 +843,31 @@ def retry_build( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, id]): + has_flattened_params = any([project_id, id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.RetryBuildRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.RetryBuildRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.RetryBuildRequest): + request = cloudbuild.RetryBuildRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if id is not None: - request.id = id + if project_id is not None: + request.project_id = project_id + if id is not None: + request.id = id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.retry_build, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.retry_build] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -797,29 +929,31 @@ def create_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger]): + has_flattened_params = any([project_id, trigger]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.CreateBuildTriggerRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.CreateBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.CreateBuildTriggerRequest): + request = cloudbuild.CreateBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger is not None: - request.trigger = trigger + if project_id is not None: + request.project_id = project_id + if trigger is not None: + request.trigger = trigger # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.create_build_trigger, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.create_build_trigger] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -874,37 +1008,31 @@ def get_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id]): + has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.GetBuildTriggerRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.GetBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.GetBuildTriggerRequest): + request = cloudbuild.GetBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. 
- rpc = gapic_v1.method.wrap_method( - self._transport.get_build_trigger, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.get_build_trigger] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -953,35 +1081,29 @@ def list_build_triggers( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id]): + has_flattened_params = any([project_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.ListBuildTriggersRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.ListBuildTriggersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.ListBuildTriggersRequest): + request = cloudbuild.ListBuildTriggersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id + if project_id is not None: + request.project_id = project_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_build_triggers, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.list_build_triggers] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1033,37 +1155,31 @@ def delete_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id]): + has_flattened_params = any([project_id, trigger_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.DeleteBuildTriggerRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.DeleteBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.DeleteBuildTriggerRequest): + request = cloudbuild.DeleteBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
- if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_build_trigger, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_build_trigger] # Send the request. rpc( @@ -1122,31 +1238,33 @@ def update_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id, trigger]): + has_flattened_params = any([project_id, trigger_id, trigger]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.UpdateBuildTriggerRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.UpdateBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.UpdateBuildTriggerRequest): + request = cloudbuild.UpdateBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - if trigger is not None: - request.trigger = trigger + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + if trigger is not None: + request.trigger = trigger # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.update_build_trigger, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.update_build_trigger] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1227,31 +1345,33 @@ def run_build_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([project_id, trigger_id, source]): + has_flattened_params = any([project_id, trigger_id, source]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) - request = cloudbuild.RunBuildTriggerRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.RunBuildTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloudbuild.RunBuildTriggerRequest): + request = cloudbuild.RunBuildTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. + # If we have keyword arguments corresponding to fields on the + # request, apply these. - if project_id is not None: - request.project_id = project_id - if trigger_id is not None: - request.trigger_id = trigger_id - if source is not None: - request.source = source + if project_id is not None: + request.project_id = project_id + if trigger_id is not None: + request.trigger_id = trigger_id + if source is not None: + request.source = source # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.run_build_trigger, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.run_build_trigger] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1309,15 +1429,16 @@ def create_worker_pool( """ # Create or coerce a protobuf request object. - request = cloudbuild.CreateWorkerPoolRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.CreateWorkerPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.CreateWorkerPoolRequest): + request = cloudbuild.CreateWorkerPoolRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.create_worker_pool, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.create_worker_pool] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1366,23 +1487,16 @@ def get_worker_pool( """ # Create or coerce a protobuf request object. - request = cloudbuild.GetWorkerPoolRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.GetWorkerPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.GetWorkerPoolRequest): + request = cloudbuild.GetWorkerPoolRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.get_worker_pool, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.get_worker_pool] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1414,15 +1528,16 @@ def delete_worker_pool( """ # Create or coerce a protobuf request object. - request = cloudbuild.DeleteWorkerPoolRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.DeleteWorkerPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, cloudbuild.DeleteWorkerPoolRequest): + request = cloudbuild.DeleteWorkerPoolRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.delete_worker_pool, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.delete_worker_pool] # Send the request. rpc( @@ -1469,15 +1584,16 @@ def update_worker_pool( """ # Create or coerce a protobuf request object. - request = cloudbuild.UpdateWorkerPoolRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.UpdateWorkerPoolRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.UpdateWorkerPoolRequest): + request = cloudbuild.UpdateWorkerPoolRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.update_worker_pool, - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.update_worker_pool] # Send the request. response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) @@ -1513,23 +1629,16 @@ def list_worker_pools( """ # Create or coerce a protobuf request object. - request = cloudbuild.ListWorkerPoolsRequest(request) + # Minor optimization to avoid making a copy if the user passes + # in a cloudbuild.ListWorkerPoolsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, cloudbuild.ListWorkerPoolsRequest): + request = cloudbuild.ListWorkerPoolsRequest(request) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. - rpc = gapic_v1.method.wrap_method( - self._transport.list_worker_pools, - default_retry=retries.Retry( - initial=0.1, - maximum=60.0, - multiplier=1.3, - predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, - ), - ), - default_timeout=600.0, - client_info=_client_info, - ) + rpc = self._transport._wrapped_methods[self._transport.list_worker_pools] # Send the request. 
         response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

@@ -1539,13 +1648,13 @@ def list_worker_pools(

 try:
-    _client_info = gapic_v1.client_info.ClientInfo(
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
         gapic_version=pkg_resources.get_distribution(
             "google-devtools-cloudbuild",
         ).version,
     )
 except pkg_resources.DistributionNotFound:
-    _client_info = gapic_v1.client_info.ClientInfo()
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()

 __all__ = ("CloudBuildClient",)
diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py
index ace575ab..d796b5b0 100644
--- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py
+++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/base.py
@@ -17,9 +17,12 @@
 import abc
 import typing
+import pkg_resources

-from google import auth
+from google import auth  # type: ignore
 from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
 from google.api_core import operations_v1  # type: ignore
 from google.auth import credentials  # type: ignore

@@ -28,6 +31,16 @@
 from google.protobuf import empty_pb2 as empty  # type: ignore

+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution(
+            "google-devtools-cloudbuild",
+        ).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
 class CloudBuildTransport(abc.ABC):
     """Abstract transport class for CloudBuild."""

@@ -40,6 +53,8 @@ def __init__(
         credentials: credentials.Credentials = None,
         credentials_file: typing.Optional[str] = None,
         scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+        quota_project_id: typing.Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
         **kwargs,
     ) -> None:
         """Instantiate the transport.
@@ -55,6 +70,13 @@ def __init__(
                 be loaded with :func:`google.auth.load_credentials_from_file`.
                 This argument is mutually exclusive with credentials.
             scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
         """
         # Save the hostname. Default to port 443 (HTTPS) if none is specified.
         if ":" not in host:
@@ -70,14 +92,147 @@ def __init__(

         if credentials_file is not None:
             credentials, _ = auth.load_credentials_from_file(
-                credentials_file, scopes=scopes
+                credentials_file, scopes=scopes, quota_project_id=quota_project_id
             )
+
         elif credentials is None:
-            credentials, _ = auth.default(scopes=scopes)
+            credentials, _ = auth.default(
+                scopes=scopes, quota_project_id=quota_project_id
+            )

         # Save the credentials.
         self._credentials = credentials

+        # Lifted into its own function so it can be stubbed out during tests.
+        self._prep_wrapped_messages(client_info)
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+ self._wrapped_methods = { + self.create_build: gapic_v1.method.wrap_method( + self.create_build, default_timeout=600.0, client_info=client_info, + ), + self.get_build: gapic_v1.method.wrap_method( + self.get_build, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=600.0, + client_info=client_info, + ), + self.list_builds: gapic_v1.method.wrap_method( + self.list_builds, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=600.0, + client_info=client_info, + ), + self.cancel_build: gapic_v1.method.wrap_method( + self.cancel_build, default_timeout=600.0, client_info=client_info, + ), + self.retry_build: gapic_v1.method.wrap_method( + self.retry_build, default_timeout=600.0, client_info=client_info, + ), + self.create_build_trigger: gapic_v1.method.wrap_method( + self.create_build_trigger, + default_timeout=600.0, + client_info=client_info, + ), + self.get_build_trigger: gapic_v1.method.wrap_method( + self.get_build_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=600.0, + client_info=client_info, + ), + self.list_build_triggers: gapic_v1.method.wrap_method( + self.list_build_triggers, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=600.0, + client_info=client_info, + ), + self.delete_build_trigger: gapic_v1.method.wrap_method( + self.delete_build_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=600.0, + client_info=client_info, + ), + self.update_build_trigger: gapic_v1.method.wrap_method( + self.update_build_trigger, + default_timeout=600.0, + client_info=client_info, + ), + self.run_build_trigger: gapic_v1.method.wrap_method( + self.run_build_trigger, default_timeout=600.0, client_info=client_info, + ), + self.create_worker_pool: gapic_v1.method.wrap_method( + self.create_worker_pool, default_timeout=600.0, client_info=client_info, + ), + self.get_worker_pool: gapic_v1.method.wrap_method( + self.get_worker_pool, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=600.0, + client_info=client_info, + ), + self.delete_worker_pool: gapic_v1.method.wrap_method( + self.delete_worker_pool, default_timeout=600.0, client_info=client_info, + ), + self.update_worker_pool: gapic_v1.method.wrap_method( + self.update_worker_pool, default_timeout=600.0, client_info=client_info, + ), + self.list_worker_pools: gapic_v1.method.wrap_method( + self.list_worker_pools, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=600.0, + client_info=client_info, + ), + } + @property def operations_client(self) 
-> operations_v1.OperationsClient:
        """Return the client designed to process long-running operations."""
diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py
index a3570363..88497cfe 100644
--- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py
+++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc.py
@@ -15,22 +15,23 @@
 # limitations under the License.
 #

+import warnings
 from typing import Callable, Dict, Optional, Sequence, Tuple

 from google.api_core import grpc_helpers  # type: ignore
 from google.api_core import operations_v1  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
 from google import auth  # type: ignore
 from google.auth import credentials  # type: ignore
 from google.auth.transport.grpc import SslCredentials  # type: ignore
-
 import grpc  # type: ignore

 from google.cloud.devtools.cloudbuild_v1.types import cloudbuild
 from google.longrunning import operations_pb2 as operations  # type: ignore
 from google.protobuf import empty_pb2 as empty  # type: ignore

-from .base import CloudBuildTransport
+from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO


 class CloudBuildGrpcTransport(CloudBuildTransport):
@@ -64,7 +65,10 @@ def __init__(
         scopes: Sequence[str] = None,
         channel: grpc.Channel = None,
         api_mtls_endpoint: str = None,
-        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None
+        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+        ssl_channel_credentials: grpc.ChannelCredentials = None,
+        quota_project_id: Optional[str] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
     ) -> None:
         """Instantiate the transport.

@@ -83,14 +87,23 @@ def __init__(
                 ignored if ``channel`` is provided.
             channel (Optional[grpc.Channel]): A ``Channel`` instance through
                 which to make calls.
-            api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
-                provided, it overrides the ``host`` argument and tries to create
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
                 a mutual TLS channel with client SSL credentials from
                 ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
-                callback to provide client SSL certificate bytes and private key
-                bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
-                is None.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for grpc channel. It is ignored if ``channel`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.

         Raises:
             google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -98,6 +111,8 @@ def __init__(
             google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
                 and ``credentials_file`` are passed.
         """
+        self._ssl_channel_credentials = ssl_channel_credentials
+
         if channel:
             # Sanity check: Ensure that channel and credentials are not both
             # provided.
@@ -105,7 +120,13 @@ def __init__(

             # If a channel was explicitly provided, set it.
             self._grpc_channel = channel
+            self._ssl_channel_credentials = None
         elif api_mtls_endpoint:
+            warnings.warn(
+                "api_mtls_endpoint and client_cert_source are deprecated",
+                DeprecationWarning,
+            )
+
             host = (
                 api_mtls_endpoint
                 if ":" in api_mtls_endpoint
@@ -113,7 +134,9 @@ def __init__(
             )

             if credentials is None:
-                credentials, _ = auth.default(scopes=self.AUTH_SCOPES)
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )

             # Create SSL credentials with client_cert_source or application
             # default SSL credentials.
@@ -132,7 +155,28 @@ def __init__(
                 credentials_file=credentials_file,
                 ssl_credentials=ssl_credentials,
                 scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
             )
+            self._ssl_channel_credentials = ssl_credentials
+        else:
+            host = host if ":" in host else host + ":443"
+
+            if credentials is None:
+                credentials, _ = auth.default(
+                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+                )
+
+            # Create a new channel, since none was provided.
+            self._grpc_channel = type(self).create_channel(
+                host,
+                credentials=credentials,
+                credentials_file=credentials_file,
+                ssl_credentials=ssl_channel_credentials,
+                scopes=scopes or self.AUTH_SCOPES,
+                quota_project_id=quota_project_id,
+            )
+
+        self._stubs = {}  # type: Dict[str, Callable]

         # Run the base constructor.
         super().__init__(
@@ -140,10 +184,10 @@ def __init__(
             credentials=credentials,
             credentials_file=credentials_file,
             scopes=scopes or self.AUTH_SCOPES,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
         )

-        self._stubs = {}  # type: Dict[str, Callable]
-
     @classmethod
     def create_channel(
         cls,
@@ -151,7 +195,8 @@ def create_channel(
         credentials: credentials.Credentials = None,
         credentials_file: str = None,
         scopes: Optional[Sequence[str]] = None,
-        **kwargs
+        quota_project_id: Optional[str] = None,
+        **kwargs,
     ) -> grpc.Channel:
         """Create and return a gRPC channel object.
         Args:
@@ -167,6 +212,8 @@ def create_channel(
             scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
                 service. These are only used when credentials are not specified and
                 are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
             kwargs (Optional[dict]): Keyword arguments, which are passed to the
                 channel creation.
         Returns:
@@ -182,24 +229,14 @@ def create_channel(
             credentials=credentials,
             credentials_file=credentials_file,
             scopes=scopes,
-            **kwargs
+            quota_project_id=quota_project_id,
+            **kwargs,
         )

     @property
     def grpc_channel(self) -> grpc.Channel:
-        """Create the channel designed to connect to this service.
-
-        This property caches on the instance; repeated calls return
-        the same channel.
+        """Return the channel designed to connect to this service.
         """
-        # Sanity check: Only create a new channel if we do not already
-        # have one.
-        if not hasattr(self, "_grpc_channel"):
-            self._grpc_channel = self.create_channel(
-                self._host, credentials=self._credentials,
-            )
-
-        # Return the channel from cache.
return self._grpc_channel @property diff --git a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py index 432dfb85..a2716bb8 100644 --- a/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py +++ b/google/cloud/devtools/cloudbuild_v1/services/cloud_build/transports/grpc_asyncio.py @@ -15,10 +15,13 @@ # limitations under the License. # +import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore from google.api_core import operations_v1 # type: ignore +from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -29,7 +32,7 @@ from google.longrunning import operations_pb2 as operations # type: ignore from google.protobuf import empty_pb2 as empty # type: ignore -from .base import CloudBuildTransport +from .base import CloudBuildTransport, DEFAULT_CLIENT_INFO from .grpc import CloudBuildGrpcTransport @@ -63,7 +66,8 @@ def create_channel( credentials: credentials.Credentials = None, credentials_file: Optional[str] = None, scopes: Optional[Sequence[str]] = None, - **kwargs + quota_project_id: Optional[str] = None, + **kwargs, ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: @@ -79,6 +83,8 @@ def create_channel( scopes (Optional[Sequence[str]]): An optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. Returns: @@ -90,7 +96,8 @@ def create_channel( credentials=credentials, credentials_file=credentials_file, scopes=scopes, - **kwargs + quota_project_id=quota_project_id, + **kwargs, ) def __init__( @@ -102,7 +109,10 @@ def __init__( scopes: Optional[Sequence[str]] = None, channel: aio.Channel = None, api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -122,14 +132,23 @@ def __init__( are passed to :func:`google.auth.default`. channel (Optional[aio.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format.
It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -137,6 +156,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -144,13 +165,24 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" ) + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: @@ -168,6 +200,25 @@ def __init__( credentials_file=credentials_file, ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, ) # Run the base constructor. @@ -176,6 +227,8 @@ def __init__( credentials=credentials, credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + client_info=client_info, ) self._stubs = {} @@ -187,13 +240,6 @@ def grpc_channel(self) -> aio.Channel: This property caches on the instance; repeated calls return the same channel. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - # Return the channel from cache. return self._grpc_channel diff --git a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py index 371c86c4..ae9dca1a 100644 --- a/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py +++ b/google/cloud/devtools/cloudbuild_v1/types/cloudbuild.py @@ -76,12 +76,17 @@ class RetryBuildRequest(proto.Message): r"""Specifies a build to retry. Attributes: + name (str): + The name of the ``Build`` to retry. Format: + ``projects/{project}/locations/{location}/builds/{build}`` project_id (str): Required. ID of the project. id (str): Required. Build ID of the original build. 
""" + name = proto.Field(proto.STRING, number=3) + project_id = proto.Field(proto.STRING, number=1) id = proto.Field(proto.STRING, number=2) @@ -116,7 +121,7 @@ class StorageSource(proto.Message): Google Cloud Storage bucket containing the source (see `Bucket Name Requirements `__). - object (str): + object_ (str): Google Cloud Storage object containing the source. This object must be a gzipped archive file (``.tar.gz``) @@ -129,7 +134,7 @@ class StorageSource(proto.Message): bucket = proto.Field(proto.STRING, number=1) - object = proto.Field(proto.STRING, number=2) + object_ = proto.Field(proto.STRING, number=2) generation = proto.Field(proto.INT64, number=3) @@ -157,7 +162,7 @@ class RepoSource(proto.Message): https://github.com/google/re2/wiki/Syntax commit_sha (str): Explicit commit SHA to build. - dir (str): + dir_ (str): Directory, relative to the source root, in which to run the build. @@ -182,7 +187,7 @@ class RepoSource(proto.Message): commit_sha = proto.Field(proto.STRING, number=5, oneof="revision") - dir = proto.Field(proto.STRING, number=7) + dir_ = proto.Field(proto.STRING, number=7) invert_regex = proto.Field(proto.BOOL, number=8) @@ -202,11 +207,11 @@ class Source(proto.Message): """ storage_source = proto.Field( - proto.MESSAGE, number=2, oneof="source", message=StorageSource, + proto.MESSAGE, number=2, oneof="source", message="StorageSource", ) repo_source = proto.Field( - proto.MESSAGE, number=3, oneof="source", message=RepoSource, + proto.MESSAGE, number=3, oneof="source", message="RepoSource", ) @@ -269,7 +274,7 @@ class BuildStep(proto.Message): entrypoint. If the image does not define an entrypoint, the first element in args is used as the entrypoint, and the remainder will be used as arguments. - dir (str): + dir_ (str): Working directory to use when running this step's container. If this value is a relative path, it is relative to the @@ -333,7 +338,7 @@ class BuildStep(proto.Message): args = proto.RepeatedField(proto.STRING, number=3) - dir = proto.Field(proto.STRING, number=4) + dir_ = proto.Field(proto.STRING, number=4) id = proto.Field(proto.STRING, number=5) @@ -407,7 +412,7 @@ class Results(proto.Message): Time to push all non-container artifacts. """ - images = proto.RepeatedField(proto.MESSAGE, number=2, message=BuiltImage,) + images = proto.RepeatedField(proto.MESSAGE, number=2, message="BuiltImage",) build_step_images = proto.RepeatedField(proto.STRING, number=3) @@ -458,6 +463,11 @@ class Build(proto.Message): - $SHORT_SHA: first 7 characters of $REVISION_ID or $COMMIT_SHA. Attributes: + name (str): + Output only. The 'Build' name with format: + ``projects/{project}/locations/{location}/builds/{build}``, + where {build} is a unique identifier generated by the + service. id (str): Output only. Unique identifier of the build. project_id (str): @@ -492,6 +502,8 @@ class Build(proto.Message): the build will cease and the build status will be ``TIMEOUT``. + ``timeout`` starts ticking from ``startTime``. + Default time is ten minutes. images (Sequence[str]): A list of images to be pushed upon the successful completion @@ -550,6 +562,13 @@ class Build(proto.Message): If the build does not specify source or images, these keys will not be included. + service_account (str): + IAM service account whose credentials will be used at build + runtime. Must be of the format + ``projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT}``. ACCOUNT + can be email address or uniqueId of the service account. + + This field is in alpha and is not publicly available. 
""" class Status(proto.Enum): @@ -564,6 +583,8 @@ class Status(proto.Enum): CANCELLED = 7 EXPIRED = 9 + name = proto.Field(proto.STRING, number=45) + id = proto.Field(proto.STRING, number=1) project_id = proto.Field(proto.STRING, number=16) @@ -572,11 +593,11 @@ class Status(proto.Enum): status_detail = proto.Field(proto.STRING, number=24) - source = proto.Field(proto.MESSAGE, number=3, message=Source,) + source = proto.Field(proto.MESSAGE, number=3, message="Source",) - steps = proto.RepeatedField(proto.MESSAGE, number=11, message=BuildStep,) + steps = proto.RepeatedField(proto.MESSAGE, number=11, message="BuildStep",) - results = proto.Field(proto.MESSAGE, number=10, message=Results,) + results = proto.Field(proto.MESSAGE, number=10, message="Results",) create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) @@ -612,6 +633,8 @@ class Status(proto.Enum): timing = proto.MapField(proto.STRING, proto.MESSAGE, number=33, message="TimeSpan",) + service_account = proto.Field(proto.STRING, number=42) + class Artifacts(proto.Message): r"""Artifacts produced by a build that should be uploaded upon @@ -700,7 +723,7 @@ class BuildOperationMetadata(proto.Message): The build that the operation is tracking. """ - build = proto.Field(proto.MESSAGE, number=1, message=Build,) + build = proto.Field(proto.MESSAGE, number=1, message="Build",) class SourceProvenance(proto.Message): @@ -730,10 +753,10 @@ class SourceProvenance(proto.Message): """ resolved_storage_source = proto.Field( - proto.MESSAGE, number=3, message=StorageSource, + proto.MESSAGE, number=3, message="StorageSource", ) - resolved_repo_source = proto.Field(proto.MESSAGE, number=6, message=RepoSource,) + resolved_repo_source = proto.Field(proto.MESSAGE, number=6, message="RepoSource",) file_hashes = proto.MapField( proto.STRING, proto.MESSAGE, number=4, message="FileHashes", @@ -757,7 +780,7 @@ class Hash(proto.Message): r"""Container message for hash values. Attributes: - type (~.cloudbuild.Hash.HashType): + type_ (~.cloudbuild.Hash.HashType): The type of hash that was performed. value (bytes): The hash value. @@ -769,7 +792,7 @@ class HashType(proto.Enum): SHA256 = 1 MD5 = 2 - type = proto.Field(proto.ENUM, number=1, enum=HashType,) + type_ = proto.Field(proto.ENUM, number=1, enum=HashType,) value = proto.Field(proto.BYTES, number=2) @@ -802,27 +825,37 @@ class CreateBuildRequest(proto.Message): r"""Request to create a new build. Attributes: + parent (str): + The parent resource where this build will be created. + Format: ``projects/{project}/locations/{location}`` project_id (str): Required. ID of the project. build (~.cloudbuild.Build): Required. Build resource to create. """ + parent = proto.Field(proto.STRING, number=4) + project_id = proto.Field(proto.STRING, number=1) - build = proto.Field(proto.MESSAGE, number=2, message=Build,) + build = proto.Field(proto.MESSAGE, number=2, message="Build",) class GetBuildRequest(proto.Message): r"""Request to get a build. Attributes: + name (str): + The name of the ``Build`` to retrieve. Format: + ``projects/{project}/locations/{location}/builds/{build}`` project_id (str): Required. ID of the project. id (str): Required. ID of the build. """ + name = proto.Field(proto.STRING, number=4) + project_id = proto.Field(proto.STRING, number=1) id = proto.Field(proto.STRING, number=2) @@ -832,6 +865,9 @@ class ListBuildsRequest(proto.Message): r"""Request to list builds. Attributes: + parent (str): + The parent of the collection of ``Builds``. 
Format: + ``projects/{project}/locations/{location}`` project_id (str): Required. ID of the project. page_size (int): @@ -843,6 +879,8 @@ class ListBuildsRequest(proto.Message): The raw filter text to constrain the results. """ + parent = proto.Field(proto.STRING, number=9) + project_id = proto.Field(proto.STRING, number=1) page_size = proto.Field(proto.INT32, number=2) @@ -866,7 +904,7 @@ class ListBuildsResponse(proto.Message): def raw_page(self): return self - builds = proto.RepeatedField(proto.MESSAGE, number=1, message=Build,) + builds = proto.RepeatedField(proto.MESSAGE, number=1, message="Build",) next_page_token = proto.Field(proto.STRING, number=2) @@ -875,12 +913,17 @@ class CancelBuildRequest(proto.Message): r"""Request to cancel an ongoing build. Attributes: + name (str): + The name of the ``Build`` to cancel. Format: + ``projects/{project}/locations/{location}/builds/{build}`` project_id (str): Required. ID of the project. id (str): Required. ID of the build. """ + name = proto.Field(proto.STRING, number=4) + project_id = proto.Field(proto.STRING, number=1) id = proto.Field(proto.STRING, number=2) @@ -929,12 +972,11 @@ class BuildTrigger(proto.Message): Output only. Time when the trigger was created. disabled (bool): - If true, the trigger will never result in a - build. + If true, the trigger will never automatically + execute a build. substitutions (Sequence[~.cloudbuild.BuildTrigger.SubstitutionsEntry]): Substitutions for Build resource. The keys must match the - following regular expression: ``^_[A-Z0-9_]+$``.The keys - cannot conflict with the keys in bindings. + following regular expression: ``^_[A-Z0-9_]+$``. ignored_files (Sequence[str]): ignored_files and included_files are file glob matches using https://golang.org/pkg/path/filepath/#Match extended with @@ -967,11 +1009,13 @@ class BuildTrigger(proto.Message): tags = proto.RepeatedField(proto.STRING, number=19) - trigger_template = proto.Field(proto.MESSAGE, number=7, message=RepoSource,) + trigger_template = proto.Field(proto.MESSAGE, number=7, message="RepoSource",) github = proto.Field(proto.MESSAGE, number=13, message="GitHubEventsConfig",) - build = proto.Field(proto.MESSAGE, number=4, oneof="build_template", message=Build,) + build = proto.Field( + proto.MESSAGE, number=4, oneof="build_template", message="Build", + ) filename = proto.Field(proto.STRING, number=8, oneof="build_template") @@ -1036,8 +1080,8 @@ class PullRequestFilter(proto.Message): is the syntax accepted by RE2 and described at https://github.com/google/re2/wiki/Syntax comment_control (~.cloudbuild.PullRequestFilter.CommentControl): - Whether to block builds on a "/gcbrun" - comment from a repository admin or collaborator. + Configure builds to run whether a repository owner or + collaborator needs to comment ``/gcbrun``. invert_regex (bool): If true, branches that do NOT match the git_ref will trigger a build.
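The request messages above gain resource-name style fields (name, parent) alongside the legacy project_id/id pair. A hedged sketch of the two addressing styles, with placeholder project and build IDs:

    from google.cloud.devtools.cloudbuild_v1.types import cloudbuild

    # Legacy identifiers:
    legacy = cloudbuild.GetBuildRequest(project_id="my-project", id="abc123")

    # Resource-name form introduced here:
    named = cloudbuild.GetBuildRequest(
        name="projects/my-project/locations/global/builds/abc123",
    )

    # Listing by parent instead of project_id:
    listing = cloudbuild.ListBuildsRequest(
        parent="projects/my-project/locations/global",
    )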
@@ -1095,7 +1139,7 @@ class CreateBuildTriggerRequest(proto.Message): project_id = proto.Field(proto.STRING, number=1) - trigger = proto.Field(proto.MESSAGE, number=2, message=BuildTrigger,) + trigger = proto.Field(proto.MESSAGE, number=2, message="BuildTrigger",) class GetBuildTriggerRequest(proto.Message): @@ -1151,7 +1195,7 @@ class ListBuildTriggersResponse(proto.Message): def raw_page(self): return self - triggers = proto.RepeatedField(proto.MESSAGE, number=1, message=BuildTrigger,) + triggers = proto.RepeatedField(proto.MESSAGE, number=1, message="BuildTrigger",) next_page_token = proto.Field(proto.STRING, number=2) @@ -1189,7 +1233,7 @@ class UpdateBuildTriggerRequest(proto.Message): trigger_id = proto.Field(proto.STRING, number=2) - trigger = proto.Field(proto.MESSAGE, number=3, message=BuildTrigger,) + trigger = proto.Field(proto.MESSAGE, number=3, message="BuildTrigger",) class BuildOptions(proto.Message): @@ -1213,19 +1257,29 @@ class BuildOptions(proto.Message): builds that request more than the maximum are rejected with an error. substitution_option (~.cloudbuild.BuildOptions.SubstitutionOption): - Option to specify behavior when there is an - error in the substitution checks. + Option to specify behavior when there is an error in the + substitution checks. + + NOTE: this is always set to ALLOW_LOOSE for triggered builds + and cannot be overridden in the build configuration file. + dynamic_substitutions (bool): + Option to specify whether or not to apply + bash style string operations to the + substitutions. + NOTE: this is always enabled for triggered + builds and cannot be overridden in the build + configuration file. log_streaming_option (~.cloudbuild.BuildOptions.LogStreamingOption): Option to define build log streaming behavior to Google Cloud Storage. worker_pool (str): Option to specify a ``WorkerPool`` for the build. Format: - projects/{project}/workerPools/{workerPool} + projects/{project}/locations/{location}/workerPools/{workerPool} This field is experimental. logging (~.cloudbuild.BuildOptions.LoggingMode): Option to specify the logging mode, which - determines where the logs are stored. + determines if and where build logs are stored. env (Sequence[str]): A list of global environment variable definitions that will exist for all build steps @@ -1263,7 +1317,10 @@ class VerifyOption(proto.Enum): VERIFIED = 1 class MachineType(proto.Enum): - r"""Supported VM sizes.""" + r"""Supported Compute Engine machine types. For more information, see + `Machine + types `__. 
+ """ UNSPECIFIED = 0 N1_HIGHCPU_8 = 1 N1_HIGHCPU_32 = 2 @@ -1288,9 +1345,12 @@ class LoggingMode(proto.Enum): LOGGING_UNSPECIFIED = 0 LEGACY = 1 GCS_ONLY = 2 + STACKDRIVER_ONLY = 3 + CLOUD_LOGGING_ONLY = 5 + NONE = 4 source_provenance_hash = proto.RepeatedField( - proto.ENUM, number=1, enum=Hash.HashType, + proto.ENUM, number=1, enum="Hash.HashType", ) requested_verify_option = proto.Field(proto.ENUM, number=2, enum=VerifyOption,) @@ -1301,6 +1361,8 @@ class LoggingMode(proto.Enum): substitution_option = proto.Field(proto.ENUM, number=4, enum=SubstitutionOption,) + dynamic_substitutions = proto.Field(proto.BOOL, number=17) + log_streaming_option = proto.Field(proto.ENUM, number=5, enum=LogStreamingOption,) worker_pool = proto.Field(proto.STRING, number=7) @@ -1311,7 +1373,7 @@ class LoggingMode(proto.Enum): secret_env = proto.RepeatedField(proto.STRING, number=13) - volumes = proto.RepeatedField(proto.MESSAGE, number=14, message=Volume,) + volumes = proto.RepeatedField(proto.MESSAGE, number=14, message="Volume",) class WorkerPool(proto.Message): @@ -1477,7 +1539,7 @@ class CreateWorkerPoolRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - worker_pool = proto.Field(proto.MESSAGE, number=2, message=WorkerPool,) + worker_pool = proto.Field(proto.MESSAGE, number=2, message="WorkerPool",) class GetWorkerPoolRequest(proto.Message): @@ -1520,7 +1582,7 @@ class UpdateWorkerPoolRequest(proto.Message): name = proto.Field(proto.STRING, number=2) - worker_pool = proto.Field(proto.MESSAGE, number=3, message=WorkerPool,) + worker_pool = proto.Field(proto.MESSAGE, number=3, message="WorkerPool",) class ListWorkerPoolsRequest(proto.Message): @@ -1542,7 +1604,7 @@ class ListWorkerPoolsResponse(proto.Message): ``WorkerPools`` for the project. """ - worker_pools = proto.RepeatedField(proto.MESSAGE, number=1, message=WorkerPool,) + worker_pools = proto.RepeatedField(proto.MESSAGE, number=1, message="WorkerPool",) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/noxfile.py b/noxfile.py index da184cbd..2c77492f 100644 --- a/noxfile.py +++ b/noxfile.py @@ -72,7 +72,9 @@ def default(session): # Install all test dependencies, then install this package in-place. session.install("asyncmock", "pytest-asyncio") - session.install("mock", "pytest", "pytest-cov") + session.install( + "mock", "pytest", "pytest-cov", + ) session.install("-e", ".") # Run py.test against the unit tests. @@ -102,6 +104,10 @@ def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") + + # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true. + if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false": + session.skip("RUN_SYSTEM_TESTS is set to false, skipping") # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") @@ -162,3 +168,38 @@ def docs(session): os.path.join("docs", ""), os.path.join("docs", "_build", "html", ""), ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docfx(session): + """Build the docfx yaml files for this library.""" + + session.install("-e", ".") + # sphinx-docfx-yaml supports up to sphinx version 1.5.5. 
+ # https://github.com/docascode/sphinx-docfx-yaml/issues/97 + session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-T", # show full traceback on exception + "-N", # no colors + "-D", + ( + "extensions=sphinx.ext.autodoc," + "sphinx.ext.autosummary," + "docfx_yaml.extension," + "sphinx.ext.intersphinx," + "sphinx.ext.coverage," + "sphinx.ext.napoleon," + "sphinx.ext.todo," + "sphinx.ext.viewcode," + "recommonmark" + ), + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh index ff599eb2..21f6d2a2 100755 --- a/scripts/decrypt-secrets.sh +++ b/scripts/decrypt-secrets.sh @@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" ) # Work from the project root. cd $ROOT +# Prevent it from overwriting files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are expected to prepare these files themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + # Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ > testing/test-env.sh gcloud secrets versions access latest \ --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ > testing/service-account.json gcloud secrets versions access latest \ --secret="python-docs-samples-client-secrets" \ - > testing/client-secrets.json \ No newline at end of file + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/scripts/fixup_cloudbuild_v1_keywords.py b/scripts/fixup_cloudbuild_v1_keywords.py index 92e726f3..04ca294c 100644 --- a/scripts/fixup_cloudbuild_v1_keywords.py +++ b/scripts/fixup_cloudbuild_v1_keywords.py @@ -1,3 +1,4 @@ +#!
/usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright 2020 Google LLC @@ -40,22 +41,23 @@ def partition( class cloudbuildCallTransformer(cst.CSTTransformer): CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'cancel_build': ('project_id', 'id', ), - 'create_build': ('project_id', 'build', ), + 'cancel_build': ('project_id', 'id', 'name', ), + 'create_build': ('project_id', 'build', 'parent', ), 'create_build_trigger': ('project_id', 'trigger', ), 'create_worker_pool': ('parent', 'worker_pool', ), 'delete_build_trigger': ('project_id', 'trigger_id', ), 'delete_worker_pool': ('name', ), - 'get_build': ('project_id', 'id', ), + 'get_build': ('project_id', 'id', 'name', ), 'get_build_trigger': ('project_id', 'trigger_id', ), 'get_worker_pool': ('name', ), - 'list_builds': ('project_id', 'page_size', 'page_token', 'filter', ), + 'list_builds': ('project_id', 'parent', 'page_size', 'page_token', 'filter', ), 'list_build_triggers': ('project_id', 'page_size', 'page_token', ), 'list_worker_pools': ('parent', ), - 'retry_build': ('project_id', 'id', ), + 'retry_build': ('project_id', 'id', 'name', ), 'run_build_trigger': ('project_id', 'trigger_id', 'source', ), 'update_build_trigger': ('project_id', 'trigger_id', 'trigger', ), 'update_worker_pool': ('name', 'worker_pool', ), + } def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: diff --git a/synth.metadata b/synth.metadata index 0e3a4407..62e8880c 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,15 +3,23 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-cloudbuild.git", - "sha": "d75e9f590378c67c52ab052cc6122301922a0560" + "remote": "git@github.com:googleapis/python-cloudbuild.git", + "sha": "ef8725c273c132097a5b91590fc0c6ec5d172641" + } + }, + { + "git": { + "name": "googleapis", + "remote": "https://github.com/googleapis/googleapis.git", + "sha": "705962b5a317e083fdbb2311d4baa72df5816686", + "internalRef": "340463146" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "f221423c5c9d28a199c052eb4962afac4b749ea3" + "sha": "ba9918cd22874245b55734f57470c719b577e591" } } ], diff --git a/tests/unit/gapic/cloudbuild_v1/__init__.py b/tests/unit/gapic/cloudbuild_v1/__init__.py index e69de29b..8b137891 100644 --- a/tests/unit/gapic/cloudbuild_v1/__init__.py +++ b/tests/unit/gapic/cloudbuild_v1/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py index fff9e16f..f2583b92 100644 --- a/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py +++ b/tests/unit/gapic/cloudbuild_v1/test_cloud_build.py @@ -31,7 +31,7 @@ from google.api_core import gapic_v1 from google.api_core import grpc_helpers from google.api_core import grpc_helpers_async -from google.api_core import operation_async +from google.api_core import operation_async # type: ignore from google.api_core import operations_v1 from google.auth import credentials from google.auth.exceptions import MutualTLSChannelError @@ -52,6 +52,17 @@ def client_cert_source_callback(): return b"cert bytes", b"key bytes" +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
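As an aside on the updated fixup_cloudbuild_v1_keywords.py above: these generated scripts rewrite positional call sites into keyword arguments so that user code survives reordered request fields like the ones added here. The flag names below are assumed from the generator's usual template, not confirmed by this diff; check --help for the exact spelling:

    python scripts/fixup_cloudbuild_v1_keywords.py \
        --input-directory .samples/ \
        --output-directory samples/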
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + def test__get_default_mtls_endpoint(): api_endpoint = "example.googleapis.com" api_mtls_endpoint = "example.mtls.googleapis.com" @@ -86,12 +97,12 @@ def test_cloud_build_client_from_service_account_file(client_class): ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds - assert client._transport._host == "cloudbuild.googleapis.com:443" + assert client.transport._host == "cloudbuild.googleapis.com:443" def test_cloud_build_client_get_transport_class(): @@ -113,6 +124,14 @@ def test_cloud_build_client_get_transport_class(): ), ], ) +@mock.patch.object( + CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient) +) +@mock.patch.object( + CloudBuildAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudBuildAsyncClient), +) def test_cloud_build_client_client_options( client_class, transport_class, transport_name ): @@ -137,103 +156,205 @@ def test_cloud_build_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". - os.environ["GOOGLE_API_USE_MTLS"] = "never" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is - # "always". - os.environ["GOOGLE_API_USE_MTLS"] = "always" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. 
- os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, + host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. - os.environ["GOOGLE_API_USE_MTLS"] = "auto" - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has # unsupported value. - os.environ["GOOGLE_API_USE_MTLS"] = "Unsupported" - with pytest.raises(MutualTLSChannelError): - client = client_class() + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() - del os.environ["GOOGLE_API_USE_MTLS"] + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "true"), + ( + CloudBuildAsyncClient, + transports.CloudBuildGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (CloudBuildClient, transports.CloudBuildGrpcTransport, "grpc", "false"), + ( + CloudBuildAsyncClient, + transports.CloudBuildGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + CloudBuildClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudBuildClient) +) +@mock.patch.object( + CloudBuildAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(CloudBuildAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_cloud_build_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() + with mock.patch( + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds + ): + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.ssl_credentials", + new_callable=mock.PropertyMock, + ) as ssl_credentials_mock: + if use_client_cert_env == "false": + is_mtls_mock.return_value = False + ssl_credentials_mock.return_value = None + expected_host = client.DEFAULT_ENDPOINT + expected_ssl_channel_creds = None + else: + is_mtls_mock.return_value = True + ssl_credentials_mock.return_value = mock.Mock() + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_ssl_channel_creds = ( + ssl_credentials_mock.return_value + ) + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + ssl_channel_credentials=expected_ssl_channel_creds, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + ): + with mock.patch( + "google.auth.transport.grpc.SslCredentials.is_mtls", + new_callable=mock.PropertyMock, + ) as is_mtls_mock: + is_mtls_mock.return_value = False + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) @pytest.mark.parametrize( @@ -260,8 +381,9 @@ def test_cloud_build_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -289,8 +411,9 @@ def test_cloud_build_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -305,22 +428,25 @@ def test_cloud_build_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + ssl_channel_credentials=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) -def test_create_build(transport: str = "grpc"): +def test_create_build( + transport: str = "grpc", request_type=cloudbuild.CreateBuildRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = cloudbuild.CreateBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_build), "__call__") as call: + with mock.patch.object(type(client.transport.create_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -330,26 +456,30 @@ def test_create_build(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CreateBuildRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +def test_create_build_from_dict(): + test_create_build(request_type=dict) + + @pytest.mark.asyncio -async def test_create_build_async(transport: str = "grpc_asyncio"): +async def test_create_build_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.CreateBuildRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CreateBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -361,24 +491,29 @@ async def test_create_build_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CreateBuildRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_create_build_async_from_dict(): + await test_create_build_async(request_type=dict) + + def test_create_build_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_build), "__call__") as call: + with mock.patch.object(type(client.transport.create_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
client.create_build( - project_id="project_id_value", build=cloudbuild.Build(id="id_value"), + project_id="project_id_value", build=cloudbuild.Build(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -388,7 +523,7 @@ def test_create_build_flattened(): assert args[0].project_id == "project_id_value" - assert args[0].build == cloudbuild.Build(id="id_value") + assert args[0].build == cloudbuild.Build(name="name_value") def test_create_build_flattened_error(): @@ -400,7 +535,7 @@ def test_create_build_flattened_error(): client.create_build( cloudbuild.CreateBuildRequest(), project_id="project_id_value", - build=cloudbuild.Build(id="id_value"), + build=cloudbuild.Build(name="name_value"), ) @@ -409,9 +544,7 @@ async def test_create_build_flattened_async(): client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -421,7 +554,7 @@ async def test_create_build_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. response = await client.create_build( - project_id="project_id_value", build=cloudbuild.Build(id="id_value"), + project_id="project_id_value", build=cloudbuild.Build(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -431,7 +564,7 @@ async def test_create_build_flattened_async(): assert args[0].project_id == "project_id_value" - assert args[0].build == cloudbuild.Build(id="id_value") + assert args[0].build == cloudbuild.Build(name="name_value") @pytest.mark.asyncio @@ -444,23 +577,24 @@ async def test_create_build_flattened_error_async(): await client.create_build( cloudbuild.CreateBuildRequest(), project_id="project_id_value", - build=cloudbuild.Build(id="id_value"), + build=cloudbuild.Build(name="name_value"), ) -def test_get_build(transport: str = "grpc"): +def test_get_build(transport: str = "grpc", request_type=cloudbuild.GetBuildRequest): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.GetBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_build), "__call__") as call: + with mock.patch.object(type(client.transport.get_build), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = cloudbuild.Build( + name="name_value", id="id_value", project_id="project_id_value", status=cloudbuild.Build.Status.QUEUED, @@ -470,6 +604,7 @@ def test_get_build(transport: str = "grpc"): build_trigger_id="build_trigger_id_value", log_url="log_url_value", tags=["tags_value"], + service_account="service_account_value", ) response = client.get_build(request) @@ -478,11 +613,14 @@ def test_get_build(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.GetBuildRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == "name_value" + assert response.id == "id_value" assert response.project_id == "project_id_value" @@ -501,24 +639,31 @@ def test_get_build(transport: str = "grpc"): assert response.tags == ["tags_value"] + assert response.service_account == "service_account_value" + + +def test_get_build_from_dict(): + test_get_build(request_type=dict) + @pytest.mark.asyncio -async def test_get_build_async(transport: str = "grpc_asyncio"): +async def test_get_build_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.GetBuildRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.GetBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloudbuild.Build( + name="name_value", id="id_value", project_id="project_id_value", status=cloudbuild.Build.Status.QUEUED, @@ -528,6 +673,7 @@ async def test_get_build_async(transport: str = "grpc_asyncio"): build_trigger_id="build_trigger_id_value", log_url="log_url_value", tags=["tags_value"], + service_account="service_account_value", ) ) @@ -537,11 +683,13 @@ async def test_get_build_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.GetBuildRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.Build) + assert response.name == "name_value" + assert response.id == "id_value" assert response.project_id == "project_id_value" @@ -560,12 +708,19 @@ async def test_get_build_async(transport: str = "grpc_asyncio"): assert response.tags == ["tags_value"] + assert response.service_account == "service_account_value" + + +@pytest.mark.asyncio +async def test_get_build_async_from_dict(): + await test_get_build_async(request_type=dict) + def test_get_build_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_build), "__call__") as call: + with mock.patch.object(type(client.transport.get_build), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = cloudbuild.Build() @@ -601,9 +756,7 @@ async def test_get_build_flattened_async(): client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() @@ -634,17 +787,19 @@ async def test_get_build_flattened_error_async(): ) -def test_list_builds(transport: str = "grpc"): +def test_list_builds( + transport: str = "grpc", request_type=cloudbuild.ListBuildsRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.ListBuildsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_builds), "__call__") as call: + with mock.patch.object(type(client.transport.list_builds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildsResponse( next_page_token="next_page_token_value", @@ -656,28 +811,33 @@ def test_list_builds(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.ListBuildsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildsPager) assert response.next_page_token == "next_page_token_value" +def test_list_builds_from_dict(): + test_list_builds(request_type=dict) + + @pytest.mark.asyncio -async def test_list_builds_async(transport: str = "grpc_asyncio"): +async def test_list_builds_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.ListBuildsRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.ListBuildsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_builds), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_builds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloudbuild.ListBuildsResponse(next_page_token="next_page_token_value",) @@ -689,7 +849,7 @@ async def test_list_builds_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.ListBuildsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListBuildsAsyncPager) @@ -697,11 +857,16 @@ async def test_list_builds_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_builds_async_from_dict(): + await test_list_builds_async(request_type=dict) + + def test_list_builds_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_builds), "__call__") as call: + with mock.patch.object(type(client.transport.list_builds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildsResponse() @@ -739,9 +904,7 @@ async def test_list_builds_flattened_async(): client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_builds), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_builds), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildsResponse() @@ -782,7 +945,7 @@ def test_list_builds_pager(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_builds), "__call__") as call: + with mock.patch.object(type(client.transport.list_builds), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( cloudbuild.ListBuildsResponse( @@ -813,7 +976,7 @@ def test_list_builds_pages(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_builds), "__call__") as call: + with mock.patch.object(type(client.transport.list_builds), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( cloudbuild.ListBuildsResponse( @@ -830,8 +993,8 @@ def test_list_builds_pages(): RuntimeError, ) pages = list(client.list_builds(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -840,9 +1003,7 @@ async def test_list_builds_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_builds), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_builds), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -875,9 +1036,7 @@ async def test_list_builds_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_builds), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_builds), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( @@ -895,25 +1054,28 @@ async def test_list_builds_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_builds(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_builds(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_cancel_build(transport: str = "grpc"): +def test_cancel_build( + transport: str = "grpc", request_type=cloudbuild.CancelBuildRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CancelBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.cancel_build), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build( + name="name_value", id="id_value", project_id="project_id_value", status=cloudbuild.Build.Status.QUEUED, @@ -923,6 +1085,7 @@ def test_cancel_build(transport: str = "grpc"): build_trigger_id="build_trigger_id_value", log_url="log_url_value", tags=["tags_value"], + service_account="service_account_value", ) response = client.cancel_build(request) @@ -931,11 +1094,14 @@ def test_cancel_build(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CancelBuildRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.Build) + assert response.name == "name_value" + assert response.id == "id_value" assert response.project_id == "project_id_value" @@ -954,24 +1120,31 @@ def test_cancel_build(transport: str = "grpc"): assert response.tags == ["tags_value"] + assert response.service_account == "service_account_value" + + +def test_cancel_build_from_dict(): + test_cancel_build(request_type=dict) + @pytest.mark.asyncio -async def test_cancel_build_async(transport: str = "grpc_asyncio"): +async def test_cancel_build_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.CancelBuildRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CancelBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.cancel_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.cancel_build), "__call__") as call: # Designate an appropriate return value for the call. 
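# The fake Build below (like its sync twin above) now also sets the two
# fields this release adds to the Build proto, so the response assertions can
# cover them:

expected_build = cloudbuild.Build(
    name="name_value",                        # new in this release
    id="id_value",
    service_account="service_account_value",  # new in this release
)
assert expected_build.name == "name_value"
assert expected_build.service_account == "service_account_value"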
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloudbuild.Build( + name="name_value", id="id_value", project_id="project_id_value", status=cloudbuild.Build.Status.QUEUED, @@ -981,6 +1154,7 @@ async def test_cancel_build_async(transport: str = "grpc_asyncio"): build_trigger_id="build_trigger_id_value", log_url="log_url_value", tags=["tags_value"], + service_account="service_account_value", ) ) @@ -990,11 +1164,13 @@ async def test_cancel_build_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CancelBuildRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.Build) + assert response.name == "name_value" + assert response.id == "id_value" assert response.project_id == "project_id_value" @@ -1013,12 +1189,19 @@ async def test_cancel_build_async(transport: str = "grpc_asyncio"): assert response.tags == ["tags_value"] + assert response.service_account == "service_account_value" + + +@pytest.mark.asyncio +async def test_cancel_build_async_from_dict(): + await test_cancel_build_async(request_type=dict) + def test_cancel_build_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.cancel_build), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() @@ -1056,9 +1239,7 @@ async def test_cancel_build_flattened_async(): client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.cancel_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.cancel_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.Build() @@ -1093,17 +1274,19 @@ async def test_cancel_build_flattened_error_async(): ) -def test_retry_build(transport: str = "grpc"): +def test_retry_build( + transport: str = "grpc", request_type=cloudbuild.RetryBuildRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.RetryBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.retry_build), "__call__") as call: + with mock.patch.object(type(client.transport.retry_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -1113,26 +1296,30 @@ def test_retry_build(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.RetryBuildRequest() # Establish that the response is the type that we expect. 
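# retry_build (below) and run_build_trigger are long-running methods: the
# mocked transport returns a raw longrunning Operation, and the client is
# expected to hand back an api-core future wrapping it, hence the
# `isinstance(response, future.Future)` assertions. In sketch form:

from google.api_core import future
from google.longrunning import operations_pb2

with mock.patch.object(type(client.transport.retry_build), "__call__") as call:
    call.return_value = operations_pb2.Operation(name="operations/spam")
    response = client.retry_build(cloudbuild.RetryBuildRequest())
assert isinstance(response, future.Future)  # not a bare Operation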
assert isinstance(response, future.Future) +def test_retry_build_from_dict(): + test_retry_build(request_type=dict) + + @pytest.mark.asyncio -async def test_retry_build_async(transport: str = "grpc_asyncio"): +async def test_retry_build_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.RetryBuildRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.RetryBuildRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.retry_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( operations_pb2.Operation(name="operations/spam") @@ -1144,17 +1331,22 @@ async def test_retry_build_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.RetryBuildRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_retry_build_async_from_dict(): + await test_retry_build_async(request_type=dict) + + def test_retry_build_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.retry_build), "__call__") as call: + with mock.patch.object(type(client.transport.retry_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1192,9 +1384,7 @@ async def test_retry_build_flattened_async(): client = CloudBuildAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.retry_build), "__call__" - ) as call: + with mock.patch.object(type(client.transport.retry_build), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -1231,18 +1421,20 @@ async def test_retry_build_flattened_error_async(): ) -def test_create_build_trigger(transport: str = "grpc"): +def test_create_build_trigger( + transport: str = "grpc", request_type=cloudbuild.CreateBuildTriggerRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CreateBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_build_trigger), "__call__" + type(client.transport.create_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. 
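# One more proto-surface change threads through the trigger tests below: the
# fake trigger's inline build template is now seeded with the new `name`
# field instead of `id`, e.g.:

trigger = cloudbuild.BuildTrigger(
    id="id_value",
    build=cloudbuild.Build(name="name_value"),  # was Build(id="id_value")
)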
call.return_value = cloudbuild.BuildTrigger( @@ -1253,7 +1445,7 @@ def test_create_build_trigger(transport: str = "grpc"): disabled=True, ignored_files=["ignored_files_value"], included_files=["included_files_value"], - build=cloudbuild.Build(id="id_value"), + build=cloudbuild.Build(name="name_value"), ) response = client.create_build_trigger(request) @@ -1262,9 +1454,10 @@ def test_create_build_trigger(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CreateBuildTriggerRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) assert response.id == "id_value" @@ -1282,19 +1475,25 @@ def test_create_build_trigger(transport: str = "grpc"): assert response.included_files == ["included_files_value"] +def test_create_build_trigger_from_dict(): + test_create_build_trigger(request_type=dict) + + @pytest.mark.asyncio -async def test_create_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_create_build_trigger_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.CreateBuildTriggerRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CreateBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_build_trigger), "__call__" + type(client.transport.create_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1315,7 +1514,7 @@ async def test_create_build_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CreateBuildTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) @@ -1335,12 +1534,17 @@ async def test_create_build_trigger_async(transport: str = "grpc_asyncio"): assert response.included_files == ["included_files_value"] +@pytest.mark.asyncio +async def test_create_build_trigger_async_from_dict(): + await test_create_build_trigger_async(request_type=dict) + + def test_create_build_trigger_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_build_trigger), "__call__" + type(client.transport.create_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() @@ -1381,7 +1585,7 @@ async def test_create_build_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_build_trigger), "__call__" + type(client.transport.create_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = cloudbuild.BuildTrigger() @@ -1420,18 +1624,20 @@ async def test_create_build_trigger_flattened_error_async(): ) -def test_get_build_trigger(transport: str = "grpc"): +def test_get_build_trigger( + transport: str = "grpc", request_type=cloudbuild.GetBuildTriggerRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.GetBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_build_trigger), "__call__" + type(client.transport.get_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger( @@ -1442,7 +1648,7 @@ def test_get_build_trigger(transport: str = "grpc"): disabled=True, ignored_files=["ignored_files_value"], included_files=["included_files_value"], - build=cloudbuild.Build(id="id_value"), + build=cloudbuild.Build(name="name_value"), ) response = client.get_build_trigger(request) @@ -1451,9 +1657,10 @@ def test_get_build_trigger(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.GetBuildTriggerRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) assert response.id == "id_value" @@ -1471,19 +1678,25 @@ def test_get_build_trigger(transport: str = "grpc"): assert response.included_files == ["included_files_value"] +def test_get_build_trigger_from_dict(): + test_get_build_trigger(request_type=dict) + + @pytest.mark.asyncio -async def test_get_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_get_build_trigger_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.GetBuildTriggerRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.GetBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_build_trigger), "__call__" + type(client.transport.get_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1504,7 +1717,7 @@ async def test_get_build_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.GetBuildTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) @@ -1524,12 +1737,17 @@ async def test_get_build_trigger_async(transport: str = "grpc_asyncio"): assert response.included_files == ["included_files_value"] +@pytest.mark.asyncio +async def test_get_build_trigger_async_from_dict(): + await test_get_build_trigger_async(request_type=dict) + + def test_get_build_trigger_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
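# The *_flattened tests that follow exercise the keyword-argument overloads:
# the client assembles the request proto from the flattened fields, and the
# test inspects the proto that reached the transport. A sketch, assuming the
# project_id/trigger_id flattened fields of GetBuildTriggerRequest (the
# assertion bodies are elided from this diff):

with mock.patch.object(type(client.transport.get_build_trigger), "__call__") as call:
    call.return_value = cloudbuild.BuildTrigger()
    client.get_build_trigger(
        project_id="project_id_value", trigger_id="trigger_id_value",
    )
_, args, _ = call.mock_calls[0]
assert args[0].project_id == "project_id_value"
assert args[0].trigger_id == "trigger_id_value"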
with mock.patch.object( - type(client._transport.get_build_trigger), "__call__" + type(client.transport.get_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() @@ -1569,7 +1787,7 @@ async def test_get_build_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_build_trigger), "__call__" + type(client.transport.get_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() @@ -1607,18 +1825,20 @@ async def test_get_build_trigger_flattened_error_async(): ) -def test_list_build_triggers(transport: str = "grpc"): +def test_list_build_triggers( + transport: str = "grpc", request_type=cloudbuild.ListBuildTriggersRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.ListBuildTriggersRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), "__call__" + type(client.transport.list_build_triggers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildTriggersResponse( @@ -1631,27 +1851,34 @@ def test_list_build_triggers(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.ListBuildTriggersRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListBuildTriggersPager) assert response.next_page_token == "next_page_token_value" +def test_list_build_triggers_from_dict(): + test_list_build_triggers(request_type=dict) + + @pytest.mark.asyncio -async def test_list_build_triggers_async(transport: str = "grpc_asyncio"): +async def test_list_build_triggers_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.ListBuildTriggersRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.ListBuildTriggersRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), "__call__" + type(client.transport.list_build_triggers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1666,7 +1893,7 @@ async def test_list_build_triggers_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.ListBuildTriggersRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListBuildTriggersAsyncPager) @@ -1674,12 +1901,17 @@ async def test_list_build_triggers_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_build_triggers_async_from_dict(): + await test_list_build_triggers_async(request_type=dict) + + def test_list_build_triggers_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), "__call__" + type(client.transport.list_build_triggers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildTriggersResponse() @@ -1713,7 +1945,7 @@ async def test_list_build_triggers_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), "__call__" + type(client.transport.list_build_triggers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListBuildTriggersResponse() @@ -1750,7 +1982,7 @@ def test_list_build_triggers_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), "__call__" + type(client.transport.list_build_triggers), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1787,7 +2019,7 @@ def test_list_build_triggers_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_build_triggers), "__call__" + type(client.transport.list_build_triggers), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1809,8 +2041,8 @@ def test_list_build_triggers_pages(): RuntimeError, ) pages = list(client.list_build_triggers(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -1819,7 +2051,7 @@ async def test_list_build_triggers_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_build_triggers), + type(client.transport.list_build_triggers), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1858,7 +2090,7 @@ async def test_list_build_triggers_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.list_build_triggers), + type(client.transport.list_build_triggers), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1882,24 +2114,26 @@ async def test_list_build_triggers_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_build_triggers(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_build_triggers(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_delete_build_trigger(transport: str = "grpc"): +def test_delete_build_trigger( + transport: str = "grpc", request_type=cloudbuild.DeleteBuildTriggerRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.DeleteBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_build_trigger), "__call__" + type(client.transport.delete_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1910,25 +2144,31 @@ def test_delete_build_trigger(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.DeleteBuildTriggerRequest() # Establish that the response is the type that we expect. assert response is None +def test_delete_build_trigger_from_dict(): + test_delete_build_trigger(request_type=dict) + + @pytest.mark.asyncio -async def test_delete_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_delete_build_trigger_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.DeleteBuildTriggerRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.DeleteBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_build_trigger), "__call__" + type(client.transport.delete_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1939,18 +2179,23 @@ async def test_delete_build_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.DeleteBuildTriggerRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_build_trigger_async_from_dict(): + await test_delete_build_trigger_async(request_type=dict) + + def test_delete_build_trigger_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
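# The generator emits a one-line wrapper (`test_delete_build_trigger_from_dict`
# above) rather than parametrizing. Hand-written tests could express the same
# proto-vs-dict matrix with pytest directly; a sketch of the equivalent (the
# function name is illustrative, not from the generated file):

import pytest

@pytest.mark.parametrize(
    "request_type", [cloudbuild.DeleteBuildTriggerRequest, dict]
)
def sketch_delete_build_trigger(request_type):
    client = CloudBuildClient(credentials=credentials.AnonymousCredentials())
    with mock.patch.object(
        type(client.transport.delete_build_trigger), "__call__"
    ) as call:
        call.return_value = None
        response = client.delete_build_trigger(request_type())
    assert response is None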
with mock.patch.object( - type(client._transport.delete_build_trigger), "__call__" + type(client.transport.delete_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1990,7 +2235,7 @@ async def test_delete_build_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_build_trigger), "__call__" + type(client.transport.delete_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2026,18 +2271,20 @@ async def test_delete_build_trigger_flattened_error_async(): ) -def test_update_build_trigger(transport: str = "grpc"): +def test_update_build_trigger( + transport: str = "grpc", request_type=cloudbuild.UpdateBuildTriggerRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.UpdateBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_build_trigger), "__call__" + type(client.transport.update_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger( @@ -2048,7 +2295,7 @@ def test_update_build_trigger(transport: str = "grpc"): disabled=True, ignored_files=["ignored_files_value"], included_files=["included_files_value"], - build=cloudbuild.Build(id="id_value"), + build=cloudbuild.Build(name="name_value"), ) response = client.update_build_trigger(request) @@ -2057,9 +2304,10 @@ def test_update_build_trigger(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.UpdateBuildTriggerRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.BuildTrigger) assert response.id == "id_value" @@ -2077,19 +2325,25 @@ def test_update_build_trigger(transport: str = "grpc"): assert response.included_files == ["included_files_value"] +def test_update_build_trigger_from_dict(): + test_update_build_trigger(request_type=dict) + + @pytest.mark.asyncio -async def test_update_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_update_build_trigger_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.UpdateBuildTriggerRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.UpdateBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_build_trigger), "__call__" + type(client.transport.update_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2110,7 +2364,7 @@ async def test_update_build_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.UpdateBuildTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.BuildTrigger) @@ -2130,12 +2384,17 @@ async def test_update_build_trigger_async(transport: str = "grpc_asyncio"): assert response.included_files == ["included_files_value"] +@pytest.mark.asyncio +async def test_update_build_trigger_async_from_dict(): + await test_update_build_trigger_async(request_type=dict) + + def test_update_build_trigger_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_build_trigger), "__call__" + type(client.transport.update_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() @@ -2180,7 +2439,7 @@ async def test_update_build_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_build_trigger), "__call__" + type(client.transport.update_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.BuildTrigger() @@ -2223,18 +2482,20 @@ async def test_update_build_trigger_flattened_error_async(): ) -def test_run_build_trigger(transport: str = "grpc"): +def test_run_build_trigger( + transport: str = "grpc", request_type=cloudbuild.RunBuildTriggerRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.RunBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.run_build_trigger), "__call__" + type(client.transport.run_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/spam") @@ -2245,25 +2506,31 @@ def test_run_build_trigger(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.RunBuildTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +def test_run_build_trigger_from_dict(): + test_run_build_trigger(request_type=dict) + + @pytest.mark.asyncio -async def test_run_build_trigger_async(transport: str = "grpc_asyncio"): +async def test_run_build_trigger_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.RunBuildTriggerRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.RunBuildTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.run_build_trigger), "__call__" + type(client.transport.run_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2276,18 +2543,23 @@ async def test_run_build_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.RunBuildTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, future.Future) +@pytest.mark.asyncio +async def test_run_build_trigger_async_from_dict(): + await test_run_build_trigger_async(request_type=dict) + + def test_run_build_trigger_flattened(): client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.run_build_trigger), "__call__" + type(client.transport.run_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2332,7 +2604,7 @@ async def test_run_build_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.run_build_trigger), "__call__" + type(client.transport.run_build_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = operations_pb2.Operation(name="operations/op") @@ -2375,18 +2647,20 @@ async def test_run_build_trigger_flattened_error_async(): ) -def test_create_worker_pool(transport: str = "grpc"): +def test_create_worker_pool( + transport: str = "grpc", request_type=cloudbuild.CreateWorkerPoolRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CreateWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_worker_pool), "__call__" + type(client.transport.create_worker_pool), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.WorkerPool( @@ -2404,9 +2678,10 @@ def test_create_worker_pool(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CreateWorkerPoolRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.WorkerPool) assert response.name == "name_value" @@ -2422,19 +2697,25 @@ def test_create_worker_pool(transport: str = "grpc"): assert response.status == cloudbuild.WorkerPool.Status.CREATING +def test_create_worker_pool_from_dict(): + test_create_worker_pool(request_type=dict) + + @pytest.mark.asyncio -async def test_create_worker_pool_async(transport: str = "grpc_asyncio"): +async def test_create_worker_pool_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.CreateWorkerPoolRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.CreateWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_worker_pool), "__call__" + type(client.transport.create_worker_pool), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2454,7 +2735,7 @@ async def test_create_worker_pool_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.CreateWorkerPoolRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) @@ -2472,17 +2753,24 @@ async def test_create_worker_pool_async(transport: str = "grpc_asyncio"): assert response.status == cloudbuild.WorkerPool.Status.CREATING -def test_get_worker_pool(transport: str = "grpc"): +@pytest.mark.asyncio +async def test_create_worker_pool_async_from_dict(): + await test_create_worker_pool_async(request_type=dict) + + +def test_get_worker_pool( + transport: str = "grpc", request_type=cloudbuild.GetWorkerPoolRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.GetWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_worker_pool), "__call__") as call: + with mock.patch.object(type(client.transport.get_worker_pool), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.WorkerPool( name="name_value", @@ -2499,9 +2787,10 @@ def test_get_worker_pool(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.GetWorkerPoolRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, cloudbuild.WorkerPool) assert response.name == "name_value" @@ -2517,20 +2806,24 @@ def test_get_worker_pool(transport: str = "grpc"): assert response.status == cloudbuild.WorkerPool.Status.CREATING +def test_get_worker_pool_from_dict(): + test_get_worker_pool(request_type=dict) + + @pytest.mark.asyncio -async def test_get_worker_pool_async(transport: str = "grpc_asyncio"): +async def test_get_worker_pool_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.GetWorkerPoolRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.GetWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_worker_pool), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_worker_pool), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( cloudbuild.WorkerPool( @@ -2549,7 +2842,7 @@ async def test_get_worker_pool_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.GetWorkerPoolRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.WorkerPool) @@ -2567,18 +2860,25 @@ async def test_get_worker_pool_async(transport: str = "grpc_asyncio"): assert response.status == cloudbuild.WorkerPool.Status.CREATING -def test_delete_worker_pool(transport: str = "grpc"): +@pytest.mark.asyncio +async def test_get_worker_pool_async_from_dict(): + await test_get_worker_pool_async(request_type=dict) + + +def test_delete_worker_pool( + transport: str = "grpc", request_type=cloudbuild.DeleteWorkerPoolRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.DeleteWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_worker_pool), "__call__" + type(client.transport.delete_worker_pool), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2589,25 +2889,31 @@ def test_delete_worker_pool(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.DeleteWorkerPoolRequest() # Establish that the response is the type that we expect. assert response is None +def test_delete_worker_pool_from_dict(): + test_delete_worker_pool(request_type=dict) + + @pytest.mark.asyncio -async def test_delete_worker_pool_async(transport: str = "grpc_asyncio"): +async def test_delete_worker_pool_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.DeleteWorkerPoolRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = cloudbuild.DeleteWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_worker_pool), "__call__" + type(client.transport.delete_worker_pool), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2618,24 +2924,31 @@ async def test_delete_worker_pool_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.DeleteWorkerPoolRequest() # Establish that the response is the type that we expect. assert response is None -def test_update_worker_pool(transport: str = "grpc"): +@pytest.mark.asyncio +async def test_delete_worker_pool_async_from_dict(): + await test_delete_worker_pool_async(request_type=dict) + + +def test_update_worker_pool( + transport: str = "grpc", request_type=cloudbuild.UpdateWorkerPoolRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.UpdateWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_worker_pool), "__call__" + type(client.transport.update_worker_pool), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.WorkerPool( @@ -2653,9 +2966,10 @@ def test_update_worker_pool(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.UpdateWorkerPoolRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.WorkerPool) assert response.name == "name_value" @@ -2671,19 +2985,25 @@ def test_update_worker_pool(transport: str = "grpc"): assert response.status == cloudbuild.WorkerPool.Status.CREATING +def test_update_worker_pool_from_dict(): + test_update_worker_pool(request_type=dict) + + @pytest.mark.asyncio -async def test_update_worker_pool_async(transport: str = "grpc_asyncio"): +async def test_update_worker_pool_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.UpdateWorkerPoolRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.UpdateWorkerPoolRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_worker_pool), "__call__" + type(client.transport.update_worker_pool), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2703,7 +3023,7 @@ async def test_update_worker_pool_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.UpdateWorkerPoolRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, cloudbuild.WorkerPool) @@ -2721,18 +3041,25 @@ async def test_update_worker_pool_async(transport: str = "grpc_asyncio"): assert response.status == cloudbuild.WorkerPool.Status.CREATING -def test_list_worker_pools(transport: str = "grpc"): +@pytest.mark.asyncio +async def test_update_worker_pool_async_from_dict(): + await test_update_worker_pool_async(request_type=dict) + + +def test_list_worker_pools( + transport: str = "grpc", request_type=cloudbuild.ListWorkerPoolsRequest +): client = CloudBuildClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.ListWorkerPoolsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_worker_pools), "__call__" + type(client.transport.list_worker_pools), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = cloudbuild.ListWorkerPoolsResponse() @@ -2743,25 +3070,32 @@ def test_list_worker_pools(transport: str = "grpc"): assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.ListWorkerPoolsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, cloudbuild.ListWorkerPoolsResponse) +def test_list_worker_pools_from_dict(): + test_list_worker_pools(request_type=dict) + + @pytest.mark.asyncio -async def test_list_worker_pools_async(transport: str = "grpc_asyncio"): +async def test_list_worker_pools_async( + transport: str = "grpc_asyncio", request_type=cloudbuild.ListWorkerPoolsRequest +): client = CloudBuildAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = cloudbuild.ListWorkerPoolsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_worker_pools), "__call__" + type(client.transport.list_worker_pools), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2774,12 +3108,17 @@ async def test_list_worker_pools_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == cloudbuild.ListWorkerPoolsRequest() # Establish that the response is the type that we expect. assert isinstance(response, cloudbuild.ListWorkerPoolsResponse) +@pytest.mark.asyncio +async def test_list_worker_pools_async_from_dict(): + await test_list_worker_pools_async(request_type=dict) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. 
transport = transports.CloudBuildGrpcTransport( @@ -2816,7 +3155,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = CloudBuildClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -2834,10 +3173,22 @@ def test_transport_get_channel(): assert channel +@pytest.mark.parametrize( + "transport_class", + [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = CloudBuildClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.CloudBuildGrpcTransport,) + assert isinstance(client.transport, transports.CloudBuildGrpcTransport,) def test_cloud_build_base_transport_error(): @@ -2851,9 +3202,13 @@ def test_cloud_build_base_transport_error(): def test_cloud_build_base_transport(): # Instantiate the base transport. - transport = transports.CloudBuildTransport( - credentials=credentials.AnonymousCredentials(), - ) + with mock.patch( + "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.CloudBuildTransport( + credentials=credentials.AnonymousCredentials(), + ) # Every method on the transport should just blindly # raise NotImplementedError. @@ -2887,22 +3242,42 @@ def test_cloud_build_base_transport(): def test_cloud_build_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file - with mock.patch.object(auth, "load_credentials_from_file") as load_creds: + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None load_creds.return_value = (credentials.AnonymousCredentials(), None) - transport = transports.CloudBuildTransport(credentials_file="credentials.json",) + transport = transports.CloudBuildTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) load_creds.assert_called_once_with( "credentials.json", scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) +def test_cloud_build_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.devtools.cloudbuild_v1.services.cloud_build.transports.CloudBuildTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.CloudBuildTransport() + adc.assert_called_once() + + def test_cloud_build_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
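# Two transport-level changes land in the hunk above: the abstract base
# transport is now instantiated with its __init__/_prep_wrapped_messages
# patched out (so no real credentials are needed), and quota_project_id is
# threaded through credential loading. The new test_transport_adc reduces to:

with mock.patch.object(auth, "default") as adc:
    adc.return_value = (credentials.AnonymousCredentials(), None)
    transports.CloudBuildGrpcTransport()  # no credentials -> ADC fallback
    adc.assert_called_once()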
with mock.patch.object(auth, "default") as adc: adc.return_value = (credentials.AnonymousCredentials(), None) CloudBuildClient() adc.assert_called_once_with( - scopes=("https://www.googleapis.com/auth/cloud-platform",) + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, ) @@ -2911,9 +3286,12 @@ def test_cloud_build_transport_auth_adc(): # ADC credentials. with mock.patch.object(auth, "default") as adc: adc.return_value = (credentials.AnonymousCredentials(), None) - transports.CloudBuildGrpcTransport(host="squid.clam.whelk") + transports.CloudBuildGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) adc.assert_called_once_with( - scopes=("https://www.googleapis.com/auth/cloud-platform",) + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", ) @@ -2924,7 +3302,7 @@ def test_cloud_build_host_no_port(): api_endpoint="cloudbuild.googleapis.com" ), ) - assert client._transport._host == "cloudbuild.googleapis.com:443" + assert client.transport._host == "cloudbuild.googleapis.com:443" def test_cloud_build_host_with_port(): @@ -2934,188 +3312,118 @@ def test_cloud_build_host_with_port(): api_endpoint="cloudbuild.googleapis.com:8000" ), ) - assert client._transport._host == "cloudbuild.googleapis.com:8000" + assert client.transport._host == "cloudbuild.googleapis.com:8000" def test_cloud_build_grpc_transport_channel(): channel = grpc.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.CloudBuildGrpcTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called + assert transport._ssl_channel_credentials == None def test_cloud_build_grpc_asyncio_transport_channel(): channel = aio.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.CloudBuildGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_cloud_build_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
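# The channel tests above are simplified: a caller-provided channel is now
# adopted as-is (the old api_mtls_endpoint/client_cert_source arguments no
# longer participate), and the transport records no SSL credentials:

import grpc

channel = grpc.insecure_channel("http://localhost/")
transport = transports.CloudBuildGrpcTransport(
    host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
# The diff writes `== None`; `is None` is the idiomatic form.
assert transport._ssl_channel_credentials is None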
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.CloudBuildGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - ) - assert transport.grpc_channel == mock_grpc_channel - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_cloud_build_grpc_asyncio_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. - mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.CloudBuildGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - ) - assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == None @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport], ) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_cloud_build_grpc_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. 
- mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.CloudBuildGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - ) - assert transport.grpc_channel == mock_grpc_channel +def test_cloud_build_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [transports.CloudBuildGrpcTransport, transports.CloudBuildGrpcAsyncIOTransport], ) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_cloud_build_grpc_asyncio_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. 
+def test_cloud_build_transport_channel_mtls_with_adc(transport_class):
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
-        mock_cred = mock.Mock()
-        transport = transports.CloudBuildGrpcAsyncIOTransport(
-            host="squid.clam.whelk",
-            credentials=mock_cred,
-            api_mtls_endpoint=api_mtls_endpoint,
-            client_cert_source=None,
-        )
-        grpc_create_channel.assert_called_once_with(
-            "mtls.squid.clam.whelk:443",
-            credentials=mock_cred,
-            credentials_file=None,
-            scopes=("https://www.googleapis.com/auth/cloud-platform",),
-            ssl_credentials=mock_ssl_cred,
-        )
-        assert transport.grpc_channel == mock_grpc_channel
+        with mock.patch.object(
+            transport_class, "create_channel", autospec=True
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=("https://www.googleapis.com/auth/cloud-platform",),
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+            )
+            assert transport.grpc_channel == mock_grpc_channel


 def test_cloud_build_grpc_lro_client():
    client = CloudBuildClient(
        credentials=credentials.AnonymousCredentials(), transport="grpc",
    )
-    transport = client._transport
+    transport = client.transport

    # Ensure that we have an api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)

@@ -3128,10 +3436,199 @@ def test_cloud_build_grpc_lro_async_client():
    client = CloudBuildAsyncClient(
        credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
    )
-    transport = client._client._transport
+    transport = client.transport

    # Ensure that we have an api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)

    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
+
+
+def test_build_path():
+    project = "squid"
+    build = "clam"
+
+    expected = "projects/{project}/builds/{build}".format(project=project, build=build,)
+    actual = CloudBuildClient.build_path(project, build)
+    assert expected == actual
+
+
+def test_parse_build_path():
+    expected = {
+        "project": "whelk",
+        "build": "octopus",
+    }
+    path = CloudBuildClient.build_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = CloudBuildClient.parse_build_path(path)
+    assert expected == actual
+
+
+def test_build_trigger_path():
+    project = "oyster"
+    trigger = "nudibranch"
+
+    expected = "projects/{project}/triggers/{trigger}".format(
+        project=project, trigger=trigger,
+    )
+    actual = CloudBuildClient.build_trigger_path(project, trigger)
+    assert expected == actual
+
+
+def test_parse_build_trigger_path():
+    expected = {
+        "project": "cuttlefish",
+        "trigger": "mussel",
+    }
+    path = CloudBuildClient.build_trigger_path(**expected)
+
+    # Check that the path construction is reversible.
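+    # e.g. "projects/cuttlefish/triggers/mussel" should parse back into
+    # {"project": "cuttlefish", "trigger": "mussel"}.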
+ actual = CloudBuildClient.parse_build_trigger_path(path) + assert expected == actual + + +def test_service_account_path(): + project = "winkle" + service_account = "nautilus" + + expected = "projects/{project}/serviceAccounts/{service_account}".format( + project=project, service_account=service_account, + ) + actual = CloudBuildClient.service_account_path(project, service_account) + assert expected == actual + + +def test_parse_service_account_path(): + expected = { + "project": "scallop", + "service_account": "abalone", + } + path = CloudBuildClient.service_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_service_account_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = CloudBuildClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = CloudBuildClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + + expected = "folders/{folder}".format(folder=folder,) + actual = CloudBuildClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = CloudBuildClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + + expected = "organizations/{organization}".format(organization=organization,) + actual = CloudBuildClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = CloudBuildClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + + expected = "projects/{project}".format(project=project,) + actual = CloudBuildClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = CloudBuildClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = CloudBuildClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = CloudBuildClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = CloudBuildClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
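+    # e.g. "projects/scallop/locations/abalone" should parse back into
+    # {"project": "scallop", "location": "abalone"}.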
+    actual = CloudBuildClient.parse_common_location_path(path)
+    assert expected == actual
+
+
+def test_client_with_DEFAULT_CLIENT_INFO():
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.CloudBuildTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = CloudBuildClient(
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.CloudBuildTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = CloudBuildClient.get_transport_class()
+        transport = transport_class(
+            credentials=credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
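+
+
+# A minimal usage sketch (illustration only, not part of the generated
+# suite; "my-quota-project" is a placeholder). The quota_project_id accepted
+# by the transports above is forwarded to google.auth.default(), as asserted
+# in test_cloud_build_transport_auth_adc, so requests are attributed to that
+# project for quota and billing:
+#
+#     transport = transports.CloudBuildGrpcTransport(
+#         host="cloudbuild.googleapis.com", quota_project_id="my-quota-project"
+#     )
+#     client = CloudBuildClient(transport=transport)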