diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml
new file mode 100644
index 0000000..e69de29
diff --git a/.gitignore b/.gitignore
index b87e1ed..b9daa52 100644
--- a/.gitignore
+++ b/.gitignore
@@ -46,6 +46,7 @@ pip-log.txt
# Built documentation
docs/_build
bigquery/docs/generated
+docs.metadata
# Virtual environment
env/
@@ -57,4 +58,4 @@ system_tests/local_test_setup
# Make sure a generated file isn't accidentally committed.
pylintrc
-pylintrc.test
\ No newline at end of file
+pylintrc.test
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 78456f8..762e3c9 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -36,4 +36,10 @@ python3.6 -m pip uninstall --yes --quiet nox-automation
python3.6 -m pip install --upgrade --quiet nox
python3.6 -m nox --version
-python3.6 -m nox
+# If NOX_SESSION is set, run only the specified session;
+# otherwise run all the sessions.
+if [[ -n "${NOX_SESSION:-}" ]]; then
+ python3.6 -m nox -s "${NOX_SESSION:-}"
+else
+ python3.6 -m nox
+fi
diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile
new file mode 100644
index 0000000..412b0b5
--- /dev/null
+++ b/.kokoro/docker/docs/Dockerfile
@@ -0,0 +1,98 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from ubuntu:20.04
+
+ENV DEBIAN_FRONTEND noninteractive
+
+# Ensure local Python is preferred over distribution Python.
+ENV PATH /usr/local/bin:$PATH
+
+# Install dependencies.
+RUN apt-get update \
+ && apt-get install -y --no-install-recommends \
+ apt-transport-https \
+ build-essential \
+ ca-certificates \
+ curl \
+ dirmngr \
+ git \
+ gpg-agent \
+ graphviz \
+ libbz2-dev \
+ libdb5.3-dev \
+ libexpat1-dev \
+ libffi-dev \
+ liblzma-dev \
+ libreadline-dev \
+ libsnappy-dev \
+ libssl-dev \
+ libsqlite3-dev \
+ portaudio19-dev \
+ redis-server \
+ software-properties-common \
+ ssh \
+ sudo \
+ tcl \
+ tcl-dev \
+ tk \
+ tk-dev \
+ uuid-dev \
+ wget \
+ zlib1g-dev \
+ && add-apt-repository universe \
+ && apt-get update \
+ && apt-get -y install jq \
+ && apt-get clean autoclean \
+ && apt-get autoremove -y \
+ && rm -rf /var/lib/apt/lists/* \
+ && rm -f /var/cache/apt/archives/*.deb
+
+
+COPY fetch_gpg_keys.sh /tmp
+# Install the desired versions of Python.
+RUN set -ex \
+ && export GNUPGHOME="$(mktemp -d)" \
+ && echo "disable-ipv6" >> "${GNUPGHOME}/dirmngr.conf" \
+ && /tmp/fetch_gpg_keys.sh \
+ && for PYTHON_VERSION in 3.7.8 3.8.5; do \
+ wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz" \
+ && wget --no-check-certificate -O python-${PYTHON_VERSION}.tar.xz.asc "https://www.python.org/ftp/python/${PYTHON_VERSION%%[a-z]*}/Python-$PYTHON_VERSION.tar.xz.asc" \
+ && gpg --batch --verify python-${PYTHON_VERSION}.tar.xz.asc python-${PYTHON_VERSION}.tar.xz \
+ && rm -r python-${PYTHON_VERSION}.tar.xz.asc \
+ && mkdir -p /usr/src/python-${PYTHON_VERSION} \
+ && tar -xJC /usr/src/python-${PYTHON_VERSION} --strip-components=1 -f python-${PYTHON_VERSION}.tar.xz \
+ && rm python-${PYTHON_VERSION}.tar.xz \
+ && cd /usr/src/python-${PYTHON_VERSION} \
+ && ./configure \
+ --enable-shared \
+ # This works only on Python 2.7 and throws a warning on every other
+ # version, but seems otherwise harmless.
+ --enable-unicode=ucs4 \
+ --with-system-ffi \
+ --without-ensurepip \
+ && make -j$(nproc) \
+ && make install \
+ && ldconfig \
+ ; done \
+ && rm -rf "${GNUPGHOME}" \
+ && rm -rf /usr/src/python* \
+ && rm -rf ~/.cache/
+
+RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \
+ && python3.7 /tmp/get-pip.py \
+ && python3.8 /tmp/get-pip.py \
+ && rm /tmp/get-pip.py
+
+CMD ["python3.7"]
diff --git a/.kokoro/docker/docs/fetch_gpg_keys.sh b/.kokoro/docker/docs/fetch_gpg_keys.sh
new file mode 100755
index 0000000..d653dd8
--- /dev/null
+++ b/.kokoro/docker/docs/fetch_gpg_keys.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A script to fetch gpg keys with retry.
+# Avoid jinja parsing the file.
+#
+
+function retry {
+ if [[ "${#}" -le 1 ]]; then
+ echo "Usage: ${0} retry_count commands.."
+ exit 1
+ fi
+ local retries=${1}
+ local command="${@:2}"
+ until [[ "${retries}" -le 0 ]]; do
+ $command && return 0
+ if [[ $? -ne 0 ]]; then
+ echo "command failed, retrying"
+ ((retries--))
+ fi
+ done
+ return 1
+}
+
+# 3.6.9, 3.7.5 (Ned Deily)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+ 0D96DF4D4110E5C43FBFB17F2D347EA6AA65421D
+
+# 3.8.0 (Łukasz Langa)
+retry 3 gpg --keyserver ha.pool.sks-keyservers.net --recv-keys \
+ E3FF2839C048B25C084DEBE9B26995E310250568
+
+#
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index 6faa595..9d1163e 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -11,12 +11,12 @@ action {
gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
# Use the trampoline script to run in docker.
-build_file: "python-notebooks/.kokoro/trampoline.sh"
+build_file: "python-notebooks/.kokoro/trampoline_v2.sh"
# Configure the docker image for kokoro-trampoline.
env_vars: {
key: "TRAMPOLINE_IMAGE"
- value: "gcr.io/cloud-devrel-kokoro-resources/python-multi"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-lib-docs"
}
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
@@ -28,6 +28,23 @@ env_vars: {
value: "docs-staging"
}
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ value: "docs-staging-v2"
+}
+
+# It will upload the docker image after successful builds.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "true"
+}
+
+# It will always build the docker image.
+env_vars: {
+ key: "TRAMPOLINE_DOCKERFILE"
+ value: ".kokoro/docker/docs/Dockerfile"
+}
+
# Fetch the token needed for reporting release status to GitHub
before_action {
fetch_keystore {
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
new file mode 100644
index 0000000..1118107
--- /dev/null
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -0,0 +1,17 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+env_vars: {
+ key: "V2_STAGING_BUCKET"
+ value: "gcloud-python-test"
+}
+
+# We only upload the image in the main `docs` build.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE_UPLOAD"
+ value: "false"
+}
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
new file mode 100755
index 0000000..f525142
--- /dev/null
+++ b/.kokoro/populate-secrets.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Copyright 2020 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function msg { println "$*" >&2 ;}
+function println { printf '%s\n' "$(now) $*" ;}
+
+
+# Populates requested secrets set in SECRET_MANAGER_KEYS from service account:
+# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
+SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
+msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
+mkdir -p ${SECRET_LOCATION}
+for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
+do
+ msg "Retrieving secret ${key}"
+ docker run --entrypoint=gcloud \
+ --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \
+ gcr.io/google.com/cloudsdktool/cloud-sdk \
+ secrets versions access latest \
+ --project cloud-devrel-kokoro-resources \
+ --secret ${key} > \
+ "${SECRET_LOCATION}/${key}"
+ if [[ $? == 0 ]]; then
+ msg "Secret written to ${SECRET_LOCATION}/${key}"
+ else
+ msg "Error retrieving secret ${key}"
+ fi
+done
diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh
index 6b4254b..8acb14e 100755
--- a/.kokoro/publish-docs.sh
+++ b/.kokoro/publish-docs.sh
@@ -18,26 +18,16 @@ set -eo pipefail
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
-cd github/python-notebooks
-
-# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
+export PATH="${HOME}/.local/bin:${PATH}"
# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
+python3 -m pip install --user --upgrade --quiet nox
+python3 -m nox --version
# build docs
nox -s docs
-python3 -m pip install gcp-docuploader
-
-# install a json parser
-sudo apt-get update
-sudo apt-get -y install software-properties-common
-sudo add-apt-repository universe
-sudo apt-get update
-sudo apt-get -y install jq
+python3 -m pip install --user gcp-docuploader
# create metadata
python3 -m docuploader create-metadata \
@@ -52,4 +42,23 @@ python3 -m docuploader create-metadata \
cat docs.metadata
# upload docs
-python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket docs-staging
+python3 -m docuploader upload docs/_build/html --metadata-file docs.metadata --staging-bucket "${STAGING_BUCKET}"
+
+
+# docfx yaml files
+nox -s docfx
+
+# create metadata.
+python3 -m docuploader create-metadata \
+ --name=$(jq --raw-output '.name // empty' .repo-metadata.json) \
+ --version=$(python3 setup.py --version) \
+ --language=$(jq --raw-output '.language // empty' .repo-metadata.json) \
+ --distribution-name=$(python3 setup.py --name) \
+ --product-page=$(jq --raw-output '.product_documentation // empty' .repo-metadata.json) \
+ --github-repository=$(jq --raw-output '.repo // empty' .repo-metadata.json) \
+ --issue-tracker=$(jq --raw-output '.issue_tracker // empty' .repo-metadata.json)
+
+cat docs.metadata
+
+# upload docs
+python3 -m docuploader upload docs/_build/html/docfx_yaml --metadata-file docs.metadata --destination-prefix docfx --staging-bucket "${V2_STAGING_BUCKET}"
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
index c3503de..c07503d 100644
--- a/.kokoro/release/common.cfg
+++ b/.kokoro/release/common.cfg
@@ -23,42 +23,18 @@ env_vars: {
value: "github/python-notebooks/.kokoro/release.sh"
}
-# Fetch the token needed for reporting release status to GitHub
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "yoshi-automation-github-key"
- }
- }
-}
-
-# Fetch PyPI password
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "google_cloud_pypi_password"
- }
- }
-}
-
-# Fetch magictoken to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "releasetool-magictoken"
- }
- }
+# Fetch PyPI password
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "google_cloud_pypi_password"
+ }
+ }
}
-# Fetch api key to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "magic-github-proxy-api-key"
- }
- }
-}
+# Tokens needed to report release status back to GitHub
+env_vars: {
+ key: "SECRET_MANAGER_KEYS"
+ value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
index 2340230..c25d8ef 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.6"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py36"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-notebooks/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index f427d16..c25aa2d 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.7"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py37"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-notebooks/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index 1cbb317..e63473b 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.8"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py38"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-notebooks/.kokoro/test-samples.sh"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index 7d68072..230a48c 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -28,6 +28,12 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
git checkout $LATEST_RELEASE
fi
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+ echo "No tests run. './samples' not found"
+ exit 0
+fi
+
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -101,4 +107,4 @@ cd "$ROOT"
# Workaround for Kokoro permissions issue: delete secrets
rm testing/{test-env.sh,client-secrets.json,service-account.json}
-exit "$RTN"
\ No newline at end of file
+exit "$RTN"
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index e8c4251..f39236e 100755
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -15,9 +15,14 @@
set -eo pipefail
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$?
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+ chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ echo "cleanup";
+}
+trap cleanup EXIT
-chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
-${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
-
-exit ${ret_code}
+$(dirname $0)/populate-secrets.sh # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh
new file mode 100755
index 0000000..719bcd5
--- /dev/null
+++ b/.kokoro/trampoline_v2.sh
@@ -0,0 +1,487 @@
+#!/usr/bin/env bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# trampoline_v2.sh
+#
+# This script does 3 things.
+#
+# 1. Prepare the Docker image for the test
+# 2. Run the Docker with appropriate flags to run the test
+# 3. Upload the newly built Docker image
+#
+# in a way that is somewhat compatible with trampoline_v1.
+#
+# To run this script, first download few files from gcs to /dev/shm.
+# (/dev/shm is passed into the container as KOKORO_GFILE_DIR).
+#
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/secrets_viewer_service_account.json /dev/shm
+# gsutil cp gs://cloud-devrel-kokoro-resources/python-docs-samples/automl_secrets.txt /dev/shm
+#
+# Then run the script.
+# .kokoro/trampoline_v2.sh
+#
+# These environment variables are required:
+# TRAMPOLINE_IMAGE: The docker image to use.
+# TRAMPOLINE_DOCKERFILE: The location of the Dockerfile.
+#
+# You can optionally change these environment variables:
+# TRAMPOLINE_IMAGE_UPLOAD:
+# (true|false): Whether to upload the Docker image after the
+# successful builds.
+# TRAMPOLINE_BUILD_FILE: The script to run in the docker container.
+# TRAMPOLINE_WORKSPACE: The workspace path in the docker container.
+# Defaults to /workspace.
+# Potentially there are some repo specific envvars in .trampolinerc in
+# the project root.
+
+
+set -euo pipefail
+
+TRAMPOLINE_VERSION="2.0.5"
+
+if command -v tput >/dev/null && [[ -n "${TERM:-}" ]]; then
+ readonly IO_COLOR_RED="$(tput setaf 1)"
+ readonly IO_COLOR_GREEN="$(tput setaf 2)"
+ readonly IO_COLOR_YELLOW="$(tput setaf 3)"
+ readonly IO_COLOR_RESET="$(tput sgr0)"
+else
+ readonly IO_COLOR_RED=""
+ readonly IO_COLOR_GREEN=""
+ readonly IO_COLOR_YELLOW=""
+ readonly IO_COLOR_RESET=""
+fi
+
+function function_exists {
+ [ $(LC_ALL=C type -t $1)"" == "function" ]
+}
+
+# Logs a message using the given color. The first argument must be one
+# of the IO_COLOR_* variables defined above, such as
+# "${IO_COLOR_YELLOW}". The remaining arguments will be logged in the
+# given color. The log message will also have an RFC-3339 timestamp
+# prepended (in UTC). You can disable the color output by setting
+# TERM=vt100.
+function log_impl() {
+ local color="$1"
+ shift
+ local timestamp="$(date -u "+%Y-%m-%dT%H:%M:%SZ")"
+ echo "================================================================"
+ echo "${color}${timestamp}:" "$@" "${IO_COLOR_RESET}"
+ echo "================================================================"
+}
+
+# Logs the given message with normal coloring and a timestamp.
+function log() {
+ log_impl "${IO_COLOR_RESET}" "$@"
+}
+
+# Logs the given message in green with a timestamp.
+function log_green() {
+ log_impl "${IO_COLOR_GREEN}" "$@"
+}
+
+# Logs the given message in yellow with a timestamp.
+function log_yellow() {
+ log_impl "${IO_COLOR_YELLOW}" "$@"
+}
+
+# Logs the given message in red with a timestamp.
+function log_red() {
+ log_impl "${IO_COLOR_RED}" "$@"
+}
+
+readonly tmpdir=$(mktemp -d -t ci-XXXXXXXX)
+readonly tmphome="${tmpdir}/h"
+mkdir -p "${tmphome}"
+
+function cleanup() {
+ rm -rf "${tmpdir}"
+}
+trap cleanup EXIT
+
+RUNNING_IN_CI="${RUNNING_IN_CI:-false}"
+
+# The workspace in the container, defaults to /workspace.
+TRAMPOLINE_WORKSPACE="${TRAMPOLINE_WORKSPACE:-/workspace}"
+
+pass_down_envvars=(
+ # TRAMPOLINE_V2 variables.
+ # Tells scripts whether they are running as part of CI or not.
+ "RUNNING_IN_CI"
+ # Indicates which CI system we're in.
+ "TRAMPOLINE_CI"
+ # Indicates the version of the script.
+ "TRAMPOLINE_VERSION"
+)
+
+log_yellow "Building with Trampoline ${TRAMPOLINE_VERSION}"
+
+# Detect which CI systems we're in. If we're in any of the CI systems
+# we support, `RUNNING_IN_CI` will be true and `TRAMPOLINE_CI` will be
+# the name of the CI system. Both envvars will be passing down to the
+# container for telling which CI system we're in.
+if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then
+ # descriptive env var for indicating it's on CI.
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="kokoro"
+ if [[ "${TRAMPOLINE_USE_LEGACY_SERVICE_ACCOUNT:-}" == "true" ]]; then
+ if [[ ! -f "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json" ]]; then
+ log_red "${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json does not exist. Did you forget to mount cloud-devrel-kokoro-resources/trampoline? Aborting."
+ exit 1
+ fi
+ # This service account will be activated later.
+ TRAMPOLINE_SERVICE_ACCOUNT="${KOKORO_GFILE_DIR}/kokoro-trampoline.service-account.json"
+ else
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ gcloud auth list
+ fi
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+ fi
+ pass_down_envvars+=(
+ # KOKORO dynamic variables.
+ "KOKORO_BUILD_NUMBER"
+ "KOKORO_BUILD_ID"
+ "KOKORO_JOB_NAME"
+ "KOKORO_GIT_COMMIT"
+ "KOKORO_GITHUB_COMMIT"
+ "KOKORO_GITHUB_PULL_REQUEST_NUMBER"
+ "KOKORO_GITHUB_PULL_REQUEST_COMMIT"
+ # For Build Cop Bot
+ "KOKORO_GITHUB_COMMIT_URL"
+ "KOKORO_GITHUB_PULL_REQUEST_URL"
+ )
+elif [[ "${TRAVIS:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="travis"
+ pass_down_envvars+=(
+ "TRAVIS_BRANCH"
+ "TRAVIS_BUILD_ID"
+ "TRAVIS_BUILD_NUMBER"
+ "TRAVIS_BUILD_WEB_URL"
+ "TRAVIS_COMMIT"
+ "TRAVIS_COMMIT_MESSAGE"
+ "TRAVIS_COMMIT_RANGE"
+ "TRAVIS_JOB_NAME"
+ "TRAVIS_JOB_NUMBER"
+ "TRAVIS_JOB_WEB_URL"
+ "TRAVIS_PULL_REQUEST"
+ "TRAVIS_PULL_REQUEST_BRANCH"
+ "TRAVIS_PULL_REQUEST_SHA"
+ "TRAVIS_PULL_REQUEST_SLUG"
+ "TRAVIS_REPO_SLUG"
+ "TRAVIS_SECURE_ENV_VARS"
+ "TRAVIS_TAG"
+ )
+elif [[ -n "${GITHUB_RUN_ID:-}" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="github-workflow"
+ pass_down_envvars+=(
+ "GITHUB_WORKFLOW"
+ "GITHUB_RUN_ID"
+ "GITHUB_RUN_NUMBER"
+ "GITHUB_ACTION"
+ "GITHUB_ACTIONS"
+ "GITHUB_ACTOR"
+ "GITHUB_REPOSITORY"
+ "GITHUB_EVENT_NAME"
+ "GITHUB_EVENT_PATH"
+ "GITHUB_SHA"
+ "GITHUB_REF"
+ "GITHUB_HEAD_REF"
+ "GITHUB_BASE_REF"
+ )
+elif [[ "${CIRCLECI:-}" == "true" ]]; then
+ RUNNING_IN_CI="true"
+ TRAMPOLINE_CI="circleci"
+ pass_down_envvars+=(
+ "CIRCLE_BRANCH"
+ "CIRCLE_BUILD_NUM"
+ "CIRCLE_BUILD_URL"
+ "CIRCLE_COMPARE_URL"
+ "CIRCLE_JOB"
+ "CIRCLE_NODE_INDEX"
+ "CIRCLE_NODE_TOTAL"
+ "CIRCLE_PREVIOUS_BUILD_NUM"
+ "CIRCLE_PROJECT_REPONAME"
+ "CIRCLE_PROJECT_USERNAME"
+ "CIRCLE_REPOSITORY_URL"
+ "CIRCLE_SHA1"
+ "CIRCLE_STAGE"
+ "CIRCLE_USERNAME"
+ "CIRCLE_WORKFLOW_ID"
+ "CIRCLE_WORKFLOW_JOB_ID"
+ "CIRCLE_WORKFLOW_UPSTREAM_JOB_IDS"
+ "CIRCLE_WORKFLOW_WORKSPACE_ID"
+ )
+fi
+
+# Configure the service account for pulling the docker image.
+function repo_root() {
+ local dir="$1"
+ while [[ ! -d "${dir}/.git" ]]; do
+ dir="$(dirname "$dir")"
+ done
+ echo "${dir}"
+}
+
+# Detect the project root. In CI builds, we assume the script is in
+# the git tree and traverse from there, otherwise, traverse from `pwd`
+# to find `.git` directory.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ PROGRAM_PATH="$(realpath "$0")"
+ PROGRAM_DIR="$(dirname "${PROGRAM_PATH}")"
+ PROJECT_ROOT="$(repo_root "${PROGRAM_DIR}")"
+else
+ PROJECT_ROOT="$(repo_root $(pwd))"
+fi
+
+log_yellow "Changing to the project root: ${PROJECT_ROOT}."
+cd "${PROJECT_ROOT}"
+
+# To support relative path for `TRAMPOLINE_SERVICE_ACCOUNT`, we need
+# to use this environment variable in `PROJECT_ROOT`.
+if [[ -n "${TRAMPOLINE_SERVICE_ACCOUNT:-}" ]]; then
+
+ mkdir -p "${tmpdir}/gcloud"
+ gcloud_config_dir="${tmpdir}/gcloud"
+
+ log_yellow "Using isolated gcloud config: ${gcloud_config_dir}."
+ export CLOUDSDK_CONFIG="${gcloud_config_dir}"
+
+ log_yellow "Using ${TRAMPOLINE_SERVICE_ACCOUNT} for authentication."
+ gcloud auth activate-service-account \
+ --key-file "${TRAMPOLINE_SERVICE_ACCOUNT}"
+ log_yellow "Configuring Container Registry access"
+ gcloud auth configure-docker --quiet
+fi
+
+required_envvars=(
+ # The basic trampoline configurations.
+ "TRAMPOLINE_IMAGE"
+ "TRAMPOLINE_BUILD_FILE"
+)
+
+if [[ -f "${PROJECT_ROOT}/.trampolinerc" ]]; then
+ source "${PROJECT_ROOT}/.trampolinerc"
+fi
+
+log_yellow "Checking environment variables."
+for e in "${required_envvars[@]}"
+do
+ if [[ -z "${!e:-}" ]]; then
+ log "Missing ${e} env var. Aborting."
+ exit 1
+ fi
+done
+
+# We want to support legacy style TRAMPOLINE_BUILD_FILE used with V1
+# script: e.g. "github/repo-name/.kokoro/run_tests.sh"
+TRAMPOLINE_BUILD_FILE="${TRAMPOLINE_BUILD_FILE#github/*/}"
+log_yellow "Using TRAMPOLINE_BUILD_FILE: ${TRAMPOLINE_BUILD_FILE}"
+
+# ignore error on docker operations and test execution
+set +e
+
+log_yellow "Preparing Docker image."
+# We only download the docker image in CI builds.
+if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ # Download the docker image specified by `TRAMPOLINE_IMAGE`
+
+ # We may want to add --max-concurrent-downloads flag.
+
+ log_yellow "Start pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ if docker pull "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="true"
+ else
+ log_red "Failed pulling the Docker image: ${TRAMPOLINE_IMAGE}."
+ has_image="false"
+ fi
+else
+ # For local run, check if we have the image.
+ if docker images "${TRAMPOLINE_IMAGE}:latest" | grep "${TRAMPOLINE_IMAGE}"; then
+ has_image="true"
+ else
+ has_image="false"
+ fi
+fi
+
+
+# The default user for a Docker container has uid 0 (root). To avoid
+# creating root-owned files in the build directory we tell docker to
+# use the current user ID.
+user_uid="$(id -u)"
+user_gid="$(id -g)"
+user_name="$(id -un)"
+
+# To allow docker in docker, we add the user to the docker group in
+# the host os.
+docker_gid=$(cut -d: -f3 < <(getent group docker))
+
+update_cache="false"
+if [[ "${TRAMPOLINE_DOCKERFILE:-none}" != "none" ]]; then
+ # Build the Docker image from the source.
+ context_dir=$(dirname "${TRAMPOLINE_DOCKERFILE}")
+ docker_build_flags=(
+ "-f" "${TRAMPOLINE_DOCKERFILE}"
+ "-t" "${TRAMPOLINE_IMAGE}"
+ "--build-arg" "UID=${user_uid}"
+ "--build-arg" "USERNAME=${user_name}"
+ )
+ if [[ "${has_image}" == "true" ]]; then
+ docker_build_flags+=("--cache-from" "${TRAMPOLINE_IMAGE}")
+ fi
+
+ log_yellow "Start building the docker image."
+ if [[ "${TRAMPOLINE_VERBOSE:-false}" == "true" ]]; then
+ echo "docker build" "${docker_build_flags[@]}" "${context_dir}"
+ fi
+
+ # ON CI systems, we want to suppress docker build logs, only
+ # output the logs when it fails.
+ if [[ "${RUNNING_IN_CI:-}" == "true" ]]; then
+ if docker build "${docker_build_flags[@]}" "${context_dir}" \
+ > "${tmpdir}/docker_build.log" 2>&1; then
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ cat "${tmpdir}/docker_build.log"
+ fi
+
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ log_yellow "Dumping the build logs:"
+ cat "${tmpdir}/docker_build.log"
+ exit 1
+ fi
+ else
+ if docker build "${docker_build_flags[@]}" "${context_dir}"; then
+ log_green "Finished building the docker image."
+ update_cache="true"
+ else
+ log_red "Failed to build the Docker image, aborting."
+ exit 1
+ fi
+ fi
+else
+ if [[ "${has_image}" != "true" ]]; then
+ log_red "We do not have ${TRAMPOLINE_IMAGE} locally, aborting."
+ exit 1
+ fi
+fi
+
+# We use an array for the flags so they are easier to document.
+docker_flags=(
+ # Remove the container after it exits.
+ "--rm"
+
+ # Use the host network.
+ "--network=host"
+
+ # Run in privileged mode. We are not using docker for sandboxing or
+ # isolation, just for packaging our dev tools.
+ "--privileged"
+
+ # Run the docker script with the user id. Because the docker image gets to
+ # write in ${PWD} you typically want this to be your user id.
+ # To allow docker in docker, we need to use docker gid on the host.
+ "--user" "${user_uid}:${docker_gid}"
+
+ # Pass down the USER.
+ "--env" "USER=${user_name}"
+
+ # Mount the project directory inside the Docker container.
+ "--volume" "${PROJECT_ROOT}:${TRAMPOLINE_WORKSPACE}"
+ "--workdir" "${TRAMPOLINE_WORKSPACE}"
+ "--env" "PROJECT_ROOT=${TRAMPOLINE_WORKSPACE}"
+
+ # Mount the temporary home directory.
+ "--volume" "${tmphome}:/h"
+ "--env" "HOME=/h"
+
+ # Allow docker in docker.
+ "--volume" "/var/run/docker.sock:/var/run/docker.sock"
+
+ # Mount the /tmp so that docker in docker can mount the files
+ # there correctly.
+ "--volume" "/tmp:/tmp"
+ # Pass down the KOKORO_GFILE_DIR and KOKORO_KEYSTORE_DIR
+ # TODO(tmatsuo): This part is not portable.
+ "--env" "TRAMPOLINE_SECRET_DIR=/secrets"
+ "--volume" "${KOKORO_GFILE_DIR:-/dev/shm}:/secrets/gfile"
+ "--env" "KOKORO_GFILE_DIR=/secrets/gfile"
+ "--volume" "${KOKORO_KEYSTORE_DIR:-/dev/shm}:/secrets/keystore"
+ "--env" "KOKORO_KEYSTORE_DIR=/secrets/keystore"
+)
+
+# Add an option for nicer output if the build gets a tty.
+if [[ -t 0 ]]; then
+ docker_flags+=("-it")
+fi
+
+# Passing down env vars
+for e in "${pass_down_envvars[@]}"
+do
+ if [[ -n "${!e:-}" ]]; then
+ docker_flags+=("--env" "${e}=${!e}")
+ fi
+done
+
+# If arguments are given, all arguments will become the commands run
+# in the container, otherwise run TRAMPOLINE_BUILD_FILE.
+if [[ $# -ge 1 ]]; then
+ log_yellow "Running the given commands '" "${@:1}" "' in the container."
+ readonly commands=("${@:1}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}" "${commands[@]}"
+else
+ log_yellow "Running the tests in a Docker container."
+ docker_flags+=("--entrypoint=${TRAMPOLINE_BUILD_FILE}")
+ if [[ "${TRAMPOLINE_VERBOSE:-}" == "true" ]]; then
+ echo docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+ fi
+ docker run "${docker_flags[@]}" "${TRAMPOLINE_IMAGE}"
+fi
+
+
+test_retval=$?
+
+if [[ ${test_retval} -eq 0 ]]; then
+ log_green "Build finished with ${test_retval}"
+else
+ log_red "Build finished with ${test_retval}"
+fi
+
+# Only upload it when the test passes.
+if [[ "${update_cache}" == "true" ]] && \
+ [[ $test_retval == 0 ]] && \
+ [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]]; then
+ log_yellow "Uploading the Docker image."
+ if docker push "${TRAMPOLINE_IMAGE}"; then
+ log_green "Finished uploading the Docker image."
+ else
+ log_red "Failed uploading the Docker image."
+ fi
+ # Call trampoline_after_upload_hook if it's defined.
+ if function_exists trampoline_after_upload_hook; then
+ trampoline_after_upload_hook
+ fi
+
+fi
+
+exit "${test_retval}"
diff --git a/.trampolinerc b/.trampolinerc
new file mode 100644
index 0000000..995ee29
--- /dev/null
+++ b/.trampolinerc
@@ -0,0 +1,51 @@
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Template for .trampolinerc
+
+# Add required env vars here.
+required_envvars+=(
+ "STAGING_BUCKET"
+ "V2_STAGING_BUCKET"
+)
+
+# Add env vars which are passed down into the container here.
+pass_down_envvars+=(
+ "STAGING_BUCKET"
+ "V2_STAGING_BUCKET"
+)
+
+# Prevent unintentional override on the default image.
+if [[ "${TRAMPOLINE_IMAGE_UPLOAD:-false}" == "true" ]] && \
+ [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ echo "Please set TRAMPOLINE_IMAGE if you want to upload the Docker image."
+ exit 1
+fi
+
+# Define the default value if it makes sense.
+if [[ -z "${TRAMPOLINE_IMAGE_UPLOAD:-}" ]]; then
+ TRAMPOLINE_IMAGE_UPLOAD=""
+fi
+
+if [[ -z "${TRAMPOLINE_IMAGE:-}" ]]; then
+ TRAMPOLINE_IMAGE=""
+fi
+
+if [[ -z "${TRAMPOLINE_DOCKERFILE:-}" ]]; then
+ TRAMPOLINE_DOCKERFILE=""
+fi
+
+if [[ -z "${TRAMPOLINE_BUILD_FILE:-}" ]]; then
+ TRAMPOLINE_BUILD_FILE=""
+fi
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index b3d1f60..039f436 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,44 +1,95 @@
-# Contributor Code of Conduct
+# Code of Conduct
-As contributors and maintainers of this project,
-and in the interest of fostering an open and welcoming community,
-we pledge to respect all people who contribute through reporting issues,
-posting feature requests, updating documentation,
-submitting pull requests or patches, and other activities.
+## Our Pledge
-We are committed to making participation in this project
-a harassment-free experience for everyone,
-regardless of level of experience, gender, gender identity and expression,
-sexual orientation, disability, personal appearance,
-body size, race, ethnicity, age, religion, or nationality.
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of
+experience, education, socio-economic status, nationality, personal appearance,
+race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
-* The use of sexualized language or imagery
-* Personal attacks
-* Trolling or insulting/derogatory comments
-* Public or private harassment
-* Publishing other's private information,
-such as physical or electronic
-addresses, without explicit permission
-* Other unethical or unprofessional conduct.
+* The use of sexualized language or imagery and unwelcome sexual attention or
+ advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct.
-By adopting this Code of Conduct,
-project maintainers commit themselves to fairly and consistently
-applying these principles to every aspect of managing this project.
-Project maintainers who do not follow or enforce the Code of Conduct
-may be permanently removed from the project team.
-
-This code of conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community.
-
-Instances of abusive, harassing, or otherwise unacceptable behavior
-may be reported by opening an issue
-or contacting one or more of the project maintainers.
-
-This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0,
-available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, or to ban temporarily or permanently any
+contributor for other behaviors that they deem inappropriate, threatening,
+offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+This Code of Conduct also applies outside the project spaces when the Project
+Steward has a reasonable belief that an individual's behavior may have a
+negative impact on the project or its community.
+
+## Conflict Resolution
+
+We do not believe that all conflict is bad; healthy debate and disagreement
+often yield positive results. However, it is never okay to be disrespectful or
+to engage in behavior that violates the project’s code of conduct.
+
+If you see someone violating the code of conduct, you are encouraged to address
+the behavior directly with those involved. Many issues can be resolved quickly
+and easily, and this gives people more control over the outcome of their
+dispute. If you are unable to resolve the matter for any reason, or if the
+behavior is threatening or harassing, report it. We are dedicated to providing
+an environment where participants feel welcome and safe.
+
+
+Reports should be directed to *googleapis-stewards@google.com*, the
+Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to
+receive and address reported violations of the code of conduct. They will then
+work with a committee consisting of representatives from the Open Source
+Programs Office and the Google Open Source Strategy team. If for any reason you
+are uncomfortable reaching out to the Project Steward, please email
+opensource@google.com.
+
+We will investigate every complaint, but you may not receive a direct response.
+We will use our discretion in determining when and how to follow up on reported
+incidents, which may range from not taking action to permanent expulsion from
+the project and project-sponsored spaces. We will notify the accused of the
+report and provide them an opportunity to discuss it before any action is taken.
+The identity of the reporter will be omitted from the details of the report
+supplied to the accused. In potentially harmful situations, such as ongoing
+harassment or threats to anyone's safety, we may take action without notice.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
+available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
\ No newline at end of file
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index da1cd30..e3f9fc8 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests.
.. nox: https://pypi.org/project/nox/
-Note on Editable Installs / Develop Mode
-========================================
-
-- As mentioned previously, using ``setuptools`` in `develop mode`_
- or a ``pip`` `editable install`_ is not possible with this
- library. This is because this library uses `namespace packages`_.
- For context see `Issue #2316`_ and the relevant `PyPA issue`_.
-
- Since ``editable`` / ``develop`` mode can't be used, packages
- need to be installed directly. Hence your changes to the source
- tree don't get incorporated into the **already installed**
- package.
-
-.. _namespace packages: https://www.python.org/dev/peps/pep-0420/
-.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316
-.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12
-.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode
-.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs
-
*****************************************
I'm getting weird errors... Can you help?
*****************************************
diff --git a/docs/conf.py b/docs/conf.py
index e2af209..c79c15d 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -20,12 +20,16 @@
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath(".."))
+# For plugins that cannot read conf.py.
+# See also: https://github.com/docascode/sphinx-docfx-yaml/issues/85
+sys.path.insert(0, os.path.abspath("."))
+
__version__ = ""
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.6.3"
+needs_sphinx = "1.5.5"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -35,6 +39,7 @@
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
+ "sphinx.ext.doctest",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
@@ -90,7 +95,12 @@
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
-exclude_patterns = ["_build"]
+exclude_patterns = [
+ "_build",
+ "samples/AUTHORING_GUIDE.md",
+ "samples/CONTRIBUTING.md",
+ "samples/snippets/README.rst",
+]
# The reST default role (used for this markup: `text`) to use for all
# documents.
@@ -335,10 +345,11 @@
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {
- "python": ("http://python.readthedocs.org/en/latest/", None),
- "google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
+ "python": ("https://python.readthedocs.org/en/latest/", None),
+ "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None),
"google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
- "grpc": ("https://grpc.io/grpc/python/", None),
+ "grpc": ("https://grpc.github.io/grpc/python/", None),
+ "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
}
diff --git a/docs/notebooks_v1beta1/types.rst b/docs/notebooks_v1beta1/types.rst
index 61809f6..b981f31 100644
--- a/docs/notebooks_v1beta1/types.rst
+++ b/docs/notebooks_v1beta1/types.rst
@@ -3,3 +3,4 @@ Types for Google Cloud Notebooks v1beta1 API
.. automodule:: google.cloud.notebooks_v1beta1.types
:members:
+ :show-inheritance:
diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py b/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py
index 7f25e66..99fafe3 100644
--- a/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py
+++ b/google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py
@@ -28,8 +28,8 @@
from google.auth import credentials # type: ignore
from google.oauth2 import service_account # type: ignore
-from google.api_core import operation
-from google.api_core import operation_async
+from google.api_core import operation # type: ignore
+from google.api_core import operation_async # type: ignore
from google.cloud.notebooks_v1beta1.services.notebook_service import pagers
from google.cloud.notebooks_v1beta1.types import environment
from google.cloud.notebooks_v1beta1.types import instance
@@ -37,13 +37,13 @@
from google.protobuf import empty_pb2 as empty # type: ignore
from google.protobuf import timestamp_pb2 as timestamp # type: ignore
-from .transports.base import NotebookServiceTransport
+from .transports.base import NotebookServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import NotebookServiceGrpcAsyncIOTransport
from .client import NotebookServiceClient
class NotebookServiceAsyncClient:
- """API service for Cloud AI Platform Notebooks."""
+ """API v1beta1 service for Cloud AI Platform Notebooks."""
_client: NotebookServiceClient
@@ -51,12 +51,51 @@ class NotebookServiceAsyncClient:
DEFAULT_MTLS_ENDPOINT = NotebookServiceClient.DEFAULT_MTLS_ENDPOINT
environment_path = staticmethod(NotebookServiceClient.environment_path)
-
+ parse_environment_path = staticmethod(NotebookServiceClient.parse_environment_path)
instance_path = staticmethod(NotebookServiceClient.instance_path)
+ parse_instance_path = staticmethod(NotebookServiceClient.parse_instance_path)
+
+ common_billing_account_path = staticmethod(
+ NotebookServiceClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ NotebookServiceClient.parse_common_billing_account_path
+ )
+
+ common_folder_path = staticmethod(NotebookServiceClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ NotebookServiceClient.parse_common_folder_path
+ )
+
+ common_organization_path = staticmethod(
+ NotebookServiceClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ NotebookServiceClient.parse_common_organization_path
+ )
+
+ common_project_path = staticmethod(NotebookServiceClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ NotebookServiceClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(NotebookServiceClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ NotebookServiceClient.parse_common_location_path
+ )
from_service_account_file = NotebookServiceClient.from_service_account_file
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> NotebookServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ NotebookServiceTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
get_transport_class = functools.partial(
type(NotebookServiceClient).get_transport_class, type(NotebookServiceClient)
)
@@ -67,6 +106,7 @@ def __init__(
credentials: credentials.Credentials = None,
transport: Union[str, NotebookServiceTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the notebook service client.
@@ -82,16 +122,19 @@ def __init__(
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -99,7 +142,10 @@ def __init__(
"""
self._client = NotebookServiceClient(
- credentials=credentials, transport=transport, client_options=client_options,
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
)
async def list_instances(
@@ -141,7 +187,7 @@ async def list_instances(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_instances,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -198,7 +244,7 @@ async def get_instance(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_instance,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -253,7 +299,7 @@ async def create_instance(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_instance,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -320,7 +366,7 @@ async def register_instance(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.register_instance,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -382,7 +428,7 @@ async def set_instance_accelerator(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.set_instance_accelerator,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -444,7 +490,7 @@ async def set_instance_machine_type(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.set_instance_machine_type,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -505,7 +551,7 @@ async def set_instance_labels(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.set_instance_labels,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -579,7 +625,7 @@ async def delete_instance(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_instance,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -641,7 +687,7 @@ async def start_instance(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.start_instance,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -703,7 +749,7 @@ async def stop_instance(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.stop_instance,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -765,7 +811,7 @@ async def reset_instance(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.reset_instance,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -831,7 +877,7 @@ async def report_instance_info(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.report_instance_info,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -890,7 +936,7 @@ async def is_instance_upgradeable(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.is_instance_upgradeable,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -946,7 +992,7 @@ async def upgrade_instance(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.upgrade_instance,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1010,7 +1056,7 @@ async def upgrade_instance_internal(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.upgrade_instance_internal,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1070,7 +1116,7 @@ async def list_environments(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_environments,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1128,7 +1174,7 @@ async def get_environment(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_environment,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1183,7 +1229,7 @@ async def create_environment(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_environment,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1257,7 +1303,7 @@ async def delete_environment(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_environment,
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1282,11 +1328,11 @@ async def delete_environment(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-cloud-notebooks",).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("NotebookServiceAsyncClient",)
diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/client.py b/google/cloud/notebooks_v1beta1/services/notebook_service/client.py
index 7b30a7a..059c910 100644
--- a/google/cloud/notebooks_v1beta1/services/notebook_service/client.py
+++ b/google/cloud/notebooks_v1beta1/services/notebook_service/client.py
@@ -16,22 +16,24 @@
#
from collections import OrderedDict
+from distutils import util
import os
import re
-from typing import Callable, Dict, Sequence, Tuple, Type, Union
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
-import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
-from google.api_core import operation
-from google.api_core import operation_async
+from google.api_core import operation # type: ignore
+from google.api_core import operation_async # type: ignore
from google.cloud.notebooks_v1beta1.services.notebook_service import pagers
from google.cloud.notebooks_v1beta1.types import environment
from google.cloud.notebooks_v1beta1.types import instance
@@ -39,7 +41,7 @@
from google.protobuf import empty_pb2 as empty # type: ignore
from google.protobuf import timestamp_pb2 as timestamp # type: ignore
-from .transports.base import NotebookServiceTransport
+from .transports.base import NotebookServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import NotebookServiceGrpcTransport
from .transports.grpc_asyncio import NotebookServiceGrpcAsyncIOTransport
@@ -78,7 +80,7 @@ def get_transport_class(cls, label: str = None,) -> Type[NotebookServiceTranspor
class NotebookServiceClient(metaclass=NotebookServiceClientMeta):
- """API service for Cloud AI Platform Notebooks."""
+ """API v1beta1 service for Cloud AI Platform Notebooks."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
@@ -134,6 +136,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> NotebookServiceTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ NotebookServiceTransport: The transport used by the client instance.
+ """
+ return self._transport
+
@staticmethod
def environment_path(project: str, environment: str,) -> str:
"""Return a fully-qualified environment string."""
@@ -162,12 +173,72 @@ def parse_instance_path(path: str) -> Dict[str, str]:
m = re.match(r"^projects/(?P.+?)/instances/(?P.+?)$", path)
return m.groupdict() if m else {}
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
- transport: Union[str, NotebookServiceTransport] = None,
- client_options: ClientOptions = None,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, NotebookServiceTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the notebook service client.
@@ -180,48 +251,74 @@ def __init__(
transport (Union[str, ~.NotebookServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client. It
- won't take effect if a ``transport`` instance is provided.
+ client_options (client_options_lib.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
- client_options = ClientOptions.from_dict(client_options)
+ client_options = client_options_lib.from_dict(client_options)
if client_options is None:
- client_options = ClientOptions.ClientOptions()
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
+
+ ssl_credentials = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ import grpc # type: ignore
+
+ cert, key = client_options.client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ is_mtls = True
+ else:
+ creds = SslCredentials()
+ is_mtls = creds.is_mtls
+ ssl_credentials = creds.ssl_credentials if is_mtls else None
- if client_options.api_endpoint is None:
- use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never")
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
- client_options.api_endpoint = self.DEFAULT_ENDPOINT
+ api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
- client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
- has_client_cert_source = (
- client_options.client_cert_source is not None
- or mtls.has_default_client_cert_source()
- )
- client_options.api_endpoint = (
- self.DEFAULT_MTLS_ENDPOINT
- if has_client_cert_source
- else self.DEFAULT_ENDPOINT
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always"
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
@@ -245,11 +342,11 @@ def __init__(
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
- host=client_options.api_endpoint,
+ host=api_endpoint,
scopes=client_options.scopes,
- api_mtls_endpoint=client_options.api_endpoint,
- client_cert_source=client_options.client_cert_source,
+ ssl_channel_credentials=ssl_credentials,
quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
)
def list_instances(
@@ -284,15 +381,16 @@ def list_instances(
"""
# Create or coerce a protobuf request object.
- request = service.ListInstancesRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.ListInstancesRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.ListInstancesRequest):
+ request = service.ListInstancesRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.list_instances,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.list_instances]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -341,15 +439,16 @@ def get_instance(
"""
# Create or coerce a protobuf request object.
- request = service.GetInstanceRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.GetInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.GetInstanceRequest):
+ request = service.GetInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.get_instance,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.get_instance]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -396,15 +495,16 @@ def create_instance(
"""
# Create or coerce a protobuf request object.
- request = service.CreateInstanceRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.CreateInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.CreateInstanceRequest):
+ request = service.CreateInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.create_instance,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.create_instance]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -463,15 +563,16 @@ def register_instance(
"""
# Create or coerce a protobuf request object.
- request = service.RegisterInstanceRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.RegisterInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.RegisterInstanceRequest):
+ request = service.RegisterInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.register_instance,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.register_instance]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -525,15 +626,16 @@ def set_instance_accelerator(
"""
# Create or coerce a protobuf request object.
- request = service.SetInstanceAcceleratorRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.SetInstanceAcceleratorRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.SetInstanceAcceleratorRequest):
+ request = service.SetInstanceAcceleratorRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.set_instance_accelerator,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.set_instance_accelerator]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -587,15 +689,18 @@ def set_instance_machine_type(
"""
# Create or coerce a protobuf request object.
- request = service.SetInstanceMachineTypeRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.SetInstanceMachineTypeRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.SetInstanceMachineTypeRequest):
+ request = service.SetInstanceMachineTypeRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.set_instance_machine_type,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[
+ self._transport.set_instance_machine_type
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -648,15 +753,16 @@ def set_instance_labels(
"""
# Create or coerce a protobuf request object.
- request = service.SetInstanceLabelsRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.SetInstanceLabelsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.SetInstanceLabelsRequest):
+ request = service.SetInstanceLabelsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.set_instance_labels,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.set_instance_labels]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -722,15 +828,16 @@ def delete_instance(
"""
# Create or coerce a protobuf request object.
- request = service.DeleteInstanceRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.DeleteInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.DeleteInstanceRequest):
+ request = service.DeleteInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.delete_instance,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.delete_instance]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -784,15 +891,16 @@ def start_instance(
"""
# Create or coerce a protobuf request object.
- request = service.StartInstanceRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.StartInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.StartInstanceRequest):
+ request = service.StartInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.start_instance,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.start_instance]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -846,15 +954,16 @@ def stop_instance(
"""
# Create or coerce a protobuf request object.
- request = service.StopInstanceRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.StopInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.StopInstanceRequest):
+ request = service.StopInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.stop_instance,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.stop_instance]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -908,15 +1017,16 @@ def reset_instance(
"""
# Create or coerce a protobuf request object.
- request = service.ResetInstanceRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.ResetInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.ResetInstanceRequest):
+ request = service.ResetInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.reset_instance,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.reset_instance]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -974,15 +1084,16 @@ def report_instance_info(
"""
# Create or coerce a protobuf request object.
- request = service.ReportInstanceInfoRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.ReportInstanceInfoRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.ReportInstanceInfoRequest):
+ request = service.ReportInstanceInfoRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.report_instance_info,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.report_instance_info]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1033,15 +1144,16 @@ def is_instance_upgradeable(
"""
# Create or coerce a protobuf request object.
- request = service.IsInstanceUpgradeableRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.IsInstanceUpgradeableRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.IsInstanceUpgradeableRequest):
+ request = service.IsInstanceUpgradeableRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.is_instance_upgradeable,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.is_instance_upgradeable]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1089,15 +1201,16 @@ def upgrade_instance(
"""
# Create or coerce a protobuf request object.
- request = service.UpgradeInstanceRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.UpgradeInstanceRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.UpgradeInstanceRequest):
+ request = service.UpgradeInstanceRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.upgrade_instance,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.upgrade_instance]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1153,15 +1266,18 @@ def upgrade_instance_internal(
"""
# Create or coerce a protobuf request object.
- request = service.UpgradeInstanceInternalRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.UpgradeInstanceInternalRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.UpgradeInstanceInternalRequest):
+ request = service.UpgradeInstanceInternalRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.upgrade_instance_internal,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[
+ self._transport.upgrade_instance_internal
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1213,15 +1329,16 @@ def list_environments(
"""
# Create or coerce a protobuf request object.
- request = service.ListEnvironmentsRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.ListEnvironmentsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.ListEnvironmentsRequest):
+ request = service.ListEnvironmentsRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.list_environments,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.list_environments]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1271,15 +1388,16 @@ def get_environment(
"""
# Create or coerce a protobuf request object.
- request = service.GetEnvironmentRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.GetEnvironmentRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.GetEnvironmentRequest):
+ request = service.GetEnvironmentRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.get_environment,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.get_environment]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1326,15 +1444,16 @@ def create_environment(
"""
# Create or coerce a protobuf request object.
- request = service.CreateEnvironmentRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.CreateEnvironmentRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.CreateEnvironmentRequest):
+ request = service.CreateEnvironmentRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.create_environment,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.create_environment]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1400,15 +1519,16 @@ def delete_environment(
"""
# Create or coerce a protobuf request object.
- request = service.DeleteEnvironmentRequest(request)
+ # Minor optimization to avoid making a copy if the user passes
+ # in a service.DeleteEnvironmentRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, service.DeleteEnvironmentRequest):
+ request = service.DeleteEnvironmentRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = gapic_v1.method.wrap_method(
- self._transport.delete_environment,
- default_timeout=60.0,
- client_info=_client_info,
- )
+ rpc = self._transport._wrapped_methods[self._transport.delete_environment]
# Certain fields should be provided within the metadata header;
# add these here.
@@ -1432,11 +1552,11 @@ def delete_environment(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-cloud-notebooks",).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("NotebookServiceClient",)
diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py
index 4270263..240525b 100644
--- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py
+++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py
@@ -17,9 +17,12 @@
import abc
import typing
+import pkg_resources
-from google import auth
+from google import auth # type: ignore
from google.api_core import exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials # type: ignore
@@ -29,6 +32,14 @@
from google.longrunning import operations_pb2 as operations # type: ignore
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution("google-cloud-notebooks",).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
class NotebookServiceTransport(abc.ABC):
"""Abstract transport class for NotebookService."""
@@ -42,6 +53,7 @@ def __init__(
credentials_file: typing.Optional[str] = None,
scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
**kwargs,
) -> None:
"""Instantiate the transport.
@@ -59,6 +71,11 @@ def __init__(
scope (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
@@ -85,6 +102,81 @@ def __init__(
# Save the credentials.
self._credentials = credentials
+ # Lifted into its own function so it can be stubbed out during tests.
+ self._prep_wrapped_messages(client_info)
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.list_instances: gapic_v1.method.wrap_method(
+ self.list_instances, default_timeout=60.0, client_info=client_info,
+ ),
+ self.get_instance: gapic_v1.method.wrap_method(
+ self.get_instance, default_timeout=60.0, client_info=client_info,
+ ),
+ self.create_instance: gapic_v1.method.wrap_method(
+ self.create_instance, default_timeout=60.0, client_info=client_info,
+ ),
+ self.register_instance: gapic_v1.method.wrap_method(
+ self.register_instance, default_timeout=60.0, client_info=client_info,
+ ),
+ self.set_instance_accelerator: gapic_v1.method.wrap_method(
+ self.set_instance_accelerator,
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.set_instance_machine_type: gapic_v1.method.wrap_method(
+ self.set_instance_machine_type,
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.set_instance_labels: gapic_v1.method.wrap_method(
+ self.set_instance_labels, default_timeout=60.0, client_info=client_info,
+ ),
+ self.delete_instance: gapic_v1.method.wrap_method(
+ self.delete_instance, default_timeout=60.0, client_info=client_info,
+ ),
+ self.start_instance: gapic_v1.method.wrap_method(
+ self.start_instance, default_timeout=60.0, client_info=client_info,
+ ),
+ self.stop_instance: gapic_v1.method.wrap_method(
+ self.stop_instance, default_timeout=60.0, client_info=client_info,
+ ),
+ self.reset_instance: gapic_v1.method.wrap_method(
+ self.reset_instance, default_timeout=60.0, client_info=client_info,
+ ),
+ self.report_instance_info: gapic_v1.method.wrap_method(
+ self.report_instance_info,
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.is_instance_upgradeable: gapic_v1.method.wrap_method(
+ self.is_instance_upgradeable,
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.upgrade_instance: gapic_v1.method.wrap_method(
+ self.upgrade_instance, default_timeout=60.0, client_info=client_info,
+ ),
+ self.upgrade_instance_internal: gapic_v1.method.wrap_method(
+ self.upgrade_instance_internal,
+ default_timeout=60.0,
+ client_info=client_info,
+ ),
+ self.list_environments: gapic_v1.method.wrap_method(
+ self.list_environments, default_timeout=60.0, client_info=client_info,
+ ),
+ self.get_environment: gapic_v1.method.wrap_method(
+ self.get_environment, default_timeout=60.0, client_info=client_info,
+ ),
+ self.create_environment: gapic_v1.method.wrap_method(
+ self.create_environment, default_timeout=60.0, client_info=client_info,
+ ),
+ self.delete_environment: gapic_v1.method.wrap_method(
+ self.delete_environment, default_timeout=60.0, client_info=client_info,
+ ),
+ }
+
@property
def operations_client(self) -> operations_v1.OperationsClient:
"""Return the client designed to process long-running operations."""
diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py
index 04de8d6..e2b843f 100644
--- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py
+++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py
@@ -15,15 +15,16 @@
# limitations under the License.
#
+import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
from google.api_core import operations_v1 # type: ignore
+from google.api_core import gapic_v1 # type: ignore
from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
-
import grpc # type: ignore
from google.cloud.notebooks_v1beta1.types import environment
@@ -31,13 +32,13 @@
from google.cloud.notebooks_v1beta1.types import service
from google.longrunning import operations_pb2 as operations # type: ignore
-from .base import NotebookServiceTransport
+from .base import NotebookServiceTransport, DEFAULT_CLIENT_INFO
class NotebookServiceGrpcTransport(NotebookServiceTransport):
"""gRPC backend transport for NotebookService.
- API service for Cloud AI Platform Notebooks.
+ API v1beta1 service for Cloud AI Platform Notebooks.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
@@ -59,7 +60,9 @@ def __init__(
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
- quota_project_id: Optional[str] = None
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -78,16 +81,23 @@ def __init__(
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -95,6 +105,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -102,7 +114,13 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -133,6 +151,26 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
+
+ self._stubs = {} # type: Dict[str, Callable]
# Run the base constructor.
super().__init__(
@@ -141,10 +179,9 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
- self._stubs = {} # type: Dict[str, Callable]
-
@classmethod
def create_channel(
cls,
@@ -153,7 +190,7 @@ def create_channel(
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
- **kwargs
+ **kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
@@ -187,24 +224,13 @@ def create_channel(
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
- **kwargs
+ **kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
- # Return the channel from cache.
return self._grpc_channel
@property
diff --git a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py
index 3a48628..5ee2c81 100644
--- a/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py
+++ b/google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py
@@ -15,10 +15,13 @@
# limitations under the License.
#
+import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
from google.api_core import operations_v1 # type: ignore
+from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
@@ -30,14 +33,14 @@
from google.cloud.notebooks_v1beta1.types import service
from google.longrunning import operations_pb2 as operations # type: ignore
-from .base import NotebookServiceTransport
+from .base import NotebookServiceTransport, DEFAULT_CLIENT_INFO
from .grpc import NotebookServiceGrpcTransport
class NotebookServiceGrpcAsyncIOTransport(NotebookServiceTransport):
"""gRPC AsyncIO backend transport for NotebookService.
- API service for Cloud AI Platform Notebooks.
+ API v1beta1 service for Cloud AI Platform Notebooks.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
@@ -101,7 +104,9 @@ def __init__(
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -121,16 +126,23 @@ def __init__(
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -138,6 +150,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -145,13 +159,24 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
@@ -171,6 +196,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
# Run the base constructor.
super().__init__(
@@ -179,6 +222,7 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
self._stubs = {}
@@ -190,13 +234,6 @@ def grpc_channel(self) -> aio.Channel:
This property caches on the instance; repeated calls return
the same channel.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
# Return the channel from cache.
return self._grpc_channel
diff --git a/google/cloud/notebooks_v1beta1/types/instance.py b/google/cloud/notebooks_v1beta1/types/instance.py
index 0f9e418..9a775f3 100644
--- a/google/cloud/notebooks_v1beta1/types/instance.py
+++ b/google/cloud/notebooks_v1beta1/types/instance.py
@@ -167,12 +167,16 @@ class State(proto.Enum):
STOPPING = 4
STOPPED = 5
DELETED = 6
+ UPGRADING = 7
+ INITIALIZING = 8
+ REGISTERING = 9
class DiskType(proto.Enum):
r"""Possible disk types for notebook instances."""
DISK_TYPE_UNSPECIFIED = 0
PD_STANDARD = 1
PD_SSD = 2
+ PD_BALANCED = 3
class DiskEncryption(proto.Enum):
r"""Definition of the disk encryption options."""
@@ -187,13 +191,13 @@ class AcceleratorConfig(proto.Message):
combination. TPUs are not supported.
Attributes:
- type (~.instance.Instance.AcceleratorType):
+ type_ (~.instance.Instance.AcceleratorType):
Type of this accelerator.
core_count (int):
Count of cores of this accelerator.
"""
- type = proto.Field(proto.ENUM, number=1, enum="Instance.AcceleratorType",)
+ type_ = proto.Field(proto.ENUM, number=1, enum="Instance.AcceleratorType",)
core_count = proto.Field(proto.INT64, number=2)
diff --git a/google/cloud/notebooks_v1beta1/types/service.py b/google/cloud/notebooks_v1beta1/types/service.py
index 652d657..75b9cc7 100644
--- a/google/cloud/notebooks_v1beta1/types/service.py
+++ b/google/cloud/notebooks_v1beta1/types/service.py
@@ -77,6 +77,8 @@ class OperationMetadata(proto.Message):
corresponding to ``Code.CANCELLED``.
api_version (str):
API version used to start the operation.
+ endpoint (str):
+ API endpoint name of this operation.
"""
create_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,)
@@ -93,6 +95,8 @@ class OperationMetadata(proto.Message):
api_version = proto.Field(proto.STRING, number=7)
+ endpoint = proto.Field(proto.STRING, number=8)
+
class ListInstancesRequest(proto.Message):
r"""Request for listing notebook instances.
@@ -204,19 +208,19 @@ class SetInstanceAcceleratorRequest(proto.Message):
name (str):
Required. Format:
``projects/{project_id}/locations/{location}/instances/{instance_id}``
- type (~.gcn_instance.Instance.AcceleratorType):
+ type_ (~.gcn_instance.Instance.AcceleratorType):
Required. Type of this accelerator.
core_count (int):
Required. Count of cores of this accelerator. Note that not
all combinations of ``type`` and ``core_count`` are valid.
Check `GPUs on Compute
- Engine `__ to find a valid
- combination. TPUs are not supported.
+ Engine `__
+ to find a valid combination. TPUs are not supported.
"""
name = proto.Field(proto.STRING, number=1)
- type = proto.Field(
+ type_ = proto.Field(
proto.ENUM, number=2, enum=gcn_instance.Instance.AcceleratorType,
)
@@ -232,7 +236,7 @@ class SetInstanceMachineTypeRequest(proto.Message):
``projects/{project_id}/locations/{location}/instances/{instance_id}``
machine_type (str):
Required. The `Compute Engine machine
- type `__.
+ type `__.
"""
name = proto.Field(proto.STRING, number=1)
@@ -353,12 +357,16 @@ class IsInstanceUpgradeableResponse(proto.Message):
The version this instance will be upgraded to
if calling the upgrade endpoint. This field will
only be populated if field upgradeable is true.
+ upgrade_info (str):
+ Additional information about upgrade.
"""
upgradeable = proto.Field(proto.BOOL, number=1)
upgrade_version = proto.Field(proto.STRING, number=2)
+ upgrade_info = proto.Field(proto.STRING, number=3)
+
class UpgradeInstanceRequest(proto.Message):
r"""Request for upgrading a notebook instance
diff --git a/notebooks-v1beta1-py.tar.gz b/notebooks-v1beta1-py.tar.gz
new file mode 100644
index 0000000..e69de29
diff --git a/noxfile.py b/noxfile.py
index 6f6cc45..5dc211b 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -28,7 +28,7 @@
DEFAULT_PYTHON_VERSION = "3.8"
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
-UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -72,7 +72,9 @@ def default(session):
# Install all test dependencies, then install this package in-place.
session.install("asyncmock", "pytest-asyncio")
- session.install("mock", "pytest", "pytest-cov")
+ session.install(
+ "mock", "pytest", "pytest-cov",
+ )
session.install("-e", ".")
# Run py.test against the unit tests.
@@ -102,6 +104,10 @@ def system(session):
"""Run the system test suite."""
system_test_path = os.path.join("tests", "system.py")
system_test_folder_path = os.path.join("tests", "system")
+
+ # Check the value of `RUN_SYSTEM_TESTS` env var. It defaults to true.
+ if os.environ.get("RUN_SYSTEM_TESTS", "true") == "false":
+ session.skip("RUN_SYSTEM_TESTS is set to false, skipping")
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
@@ -162,3 +168,38 @@ def docs(session):
os.path.join("docs", ""),
os.path.join("docs", "_build", "html", ""),
)
+
+
+@nox.session(python=DEFAULT_PYTHON_VERSION)
+def docfx(session):
+ """Build the docfx yaml files for this library."""
+
+ session.install("-e", ".")
+ # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
+ # https://github.com/docascode/sphinx-docfx-yaml/issues/97
+ session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+
+ shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+ session.run(
+ "sphinx-build",
+ "-T", # show full traceback on exception
+ "-N", # no colors
+ "-D",
+ (
+ "extensions=sphinx.ext.autodoc,"
+ "sphinx.ext.autosummary,"
+ "docfx_yaml.extension,"
+ "sphinx.ext.intersphinx,"
+ "sphinx.ext.coverage,"
+ "sphinx.ext.napoleon,"
+ "sphinx.ext.todo,"
+ "sphinx.ext.viewcode,"
+ "recommonmark"
+ ),
+ "-b",
+ "html",
+ "-d",
+ os.path.join("docs", "_build", "doctrees", ""),
+ os.path.join("docs", ""),
+ os.path.join("docs", "_build", "html", ""),
+ )
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
index ff599eb..21f6d2a 100755
--- a/scripts/decrypt-secrets.sh
+++ b/scripts/decrypt-secrets.sh
@@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" )
# Work from the project root.
cd $ROOT
+# Prevent it from overriding files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+ [[ -f "testing/service-account.json" ]] || \
+ [[ -f "testing/client-secrets.json" ]]; then
+ echo "One or more target files exist, aborting."
+ exit 1
+fi
+
# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ --project="${PROJECT_ID}" \
> testing/test-env.sh
gcloud secrets versions access latest \
--secret="python-docs-samples-service-account" \
+ --project="${PROJECT_ID}" \
> testing/service-account.json
gcloud secrets versions access latest \
--secret="python-docs-samples-client-secrets" \
- > testing/client-secrets.json
\ No newline at end of file
+ --project="${PROJECT_ID}" \
+ > testing/client-secrets.json
diff --git a/synth.metadata b/synth.metadata
index e247614..6dd9f2a 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -3,21 +3,30 @@
{
"git": {
"name": ".",
- "remote": "sso://devrel/cloud/libraries/python/python-notebooks"
+ "remote": "https://github.com/googleapis/python-notebooks.git",
+ "sha": "8e42088da1eb05c053a8255a2ac36790a95b00d2"
+ }
+ },
+ {
+ "git": {
+ "name": "googleapis",
+ "remote": "https://github.com/googleapis/googleapis.git",
+ "sha": "830887bae22f690647a0fd8b5c9eccd8d6858d74",
+ "internalRef": "345047174"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "a6643f6f27fc70e2c63c90944bb900c709022e57"
+ "sha": "a073c873f3928c561bdf87fdfbf1d081d1998984"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "a6643f6f27fc70e2c63c90944bb900c709022e57"
+ "sha": "a073c873f3928c561bdf87fdfbf1d081d1998984"
}
}
],
@@ -31,5 +40,93 @@
"generator": "bazel"
}
}
+ ],
+ "generatedFiles": [
+ ".flake8",
+ ".github/CONTRIBUTING.md",
+ ".github/ISSUE_TEMPLATE/bug_report.md",
+ ".github/ISSUE_TEMPLATE/feature_request.md",
+ ".github/ISSUE_TEMPLATE/support_request.md",
+ ".github/PULL_REQUEST_TEMPLATE.md",
+ ".github/release-please.yml",
+ ".github/snippet-bot.yml",
+ ".gitignore",
+ ".kokoro/build.sh",
+ ".kokoro/continuous/common.cfg",
+ ".kokoro/continuous/continuous.cfg",
+ ".kokoro/docker/docs/Dockerfile",
+ ".kokoro/docker/docs/fetch_gpg_keys.sh",
+ ".kokoro/docs/common.cfg",
+ ".kokoro/docs/docs-presubmit.cfg",
+ ".kokoro/docs/docs.cfg",
+ ".kokoro/populate-secrets.sh",
+ ".kokoro/presubmit/common.cfg",
+ ".kokoro/presubmit/presubmit.cfg",
+ ".kokoro/publish-docs.sh",
+ ".kokoro/release.sh",
+ ".kokoro/release/common.cfg",
+ ".kokoro/release/release.cfg",
+ ".kokoro/samples/lint/common.cfg",
+ ".kokoro/samples/lint/continuous.cfg",
+ ".kokoro/samples/lint/periodic.cfg",
+ ".kokoro/samples/lint/presubmit.cfg",
+ ".kokoro/samples/python3.6/common.cfg",
+ ".kokoro/samples/python3.6/continuous.cfg",
+ ".kokoro/samples/python3.6/periodic.cfg",
+ ".kokoro/samples/python3.6/presubmit.cfg",
+ ".kokoro/samples/python3.7/common.cfg",
+ ".kokoro/samples/python3.7/continuous.cfg",
+ ".kokoro/samples/python3.7/periodic.cfg",
+ ".kokoro/samples/python3.7/presubmit.cfg",
+ ".kokoro/samples/python3.8/common.cfg",
+ ".kokoro/samples/python3.8/continuous.cfg",
+ ".kokoro/samples/python3.8/periodic.cfg",
+ ".kokoro/samples/python3.8/presubmit.cfg",
+ ".kokoro/test-samples.sh",
+ ".kokoro/trampoline.sh",
+ ".kokoro/trampoline_v2.sh",
+ ".trampolinerc",
+ "CODE_OF_CONDUCT.md",
+ "CONTRIBUTING.rst",
+ "LICENSE",
+ "MANIFEST.in",
+ "docs/_static/custom.css",
+ "docs/_templates/layout.html",
+ "docs/conf.py",
+ "docs/multiprocessing.rst",
+ "docs/notebooks_v1beta1/services.rst",
+ "docs/notebooks_v1beta1/types.rst",
+ "google/cloud/notebooks/__init__.py",
+ "google/cloud/notebooks/py.typed",
+ "google/cloud/notebooks_v1beta1/__init__.py",
+ "google/cloud/notebooks_v1beta1/py.typed",
+ "google/cloud/notebooks_v1beta1/services/__init__.py",
+ "google/cloud/notebooks_v1beta1/services/notebook_service/__init__.py",
+ "google/cloud/notebooks_v1beta1/services/notebook_service/async_client.py",
+ "google/cloud/notebooks_v1beta1/services/notebook_service/client.py",
+ "google/cloud/notebooks_v1beta1/services/notebook_service/pagers.py",
+ "google/cloud/notebooks_v1beta1/services/notebook_service/transports/__init__.py",
+ "google/cloud/notebooks_v1beta1/services/notebook_service/transports/base.py",
+ "google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc.py",
+ "google/cloud/notebooks_v1beta1/services/notebook_service/transports/grpc_asyncio.py",
+ "google/cloud/notebooks_v1beta1/types/__init__.py",
+ "google/cloud/notebooks_v1beta1/types/environment.py",
+ "google/cloud/notebooks_v1beta1/types/instance.py",
+ "google/cloud/notebooks_v1beta1/types/service.py",
+ "mypy.ini",
+ "notebooks-v1beta1-py.tar.gz",
+ "noxfile.py",
+ "renovate.json",
+ "scripts/decrypt-secrets.sh",
+ "scripts/readme-gen/readme_gen.py",
+ "scripts/readme-gen/templates/README.tmpl.rst",
+ "scripts/readme-gen/templates/auth.tmpl.rst",
+ "scripts/readme-gen/templates/auth_api_key.tmpl.rst",
+ "scripts/readme-gen/templates/install_deps.tmpl.rst",
+ "scripts/readme-gen/templates/install_portaudio.tmpl.rst",
+ "setup.cfg",
+ "testing/.gitignore",
+ "tests/unit/gapic/notebooks_v1beta1/__init__.py",
+ "tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py"
]
}
\ No newline at end of file
diff --git a/tests/unit/gapic/notebooks_v1beta1/__init__.py b/tests/unit/gapic/notebooks_v1beta1/__init__.py
new file mode 100644
index 0000000..8b13789
--- /dev/null
+++ b/tests/unit/gapic/notebooks_v1beta1/__init__.py
@@ -0,0 +1 @@
+
diff --git a/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py b/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py
index aa23227..ff762dc 100644
--- a/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py
+++ b/tests/unit/gapic/notebooks_v1beta1/test_notebook_service.py
@@ -31,7 +31,7 @@
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
-from google.api_core import operation_async
+from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.auth import credentials
from google.auth.exceptions import MutualTLSChannelError
@@ -55,6 +55,17 @@ def client_cert_source_callback():
return b"cert bytes", b"key bytes"
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+ return (
+ "foo.googleapis.com"
+ if ("localhost" in client.DEFAULT_ENDPOINT)
+ else client.DEFAULT_ENDPOINT
+ )
+
+
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
@@ -94,12 +105,12 @@ def test_notebook_service_client_from_service_account_file(client_class):
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
client = client_class.from_service_account_json("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
- assert client._transport._host == "notebooks.googleapis.com:443"
+ assert client.transport._host == "notebooks.googleapis.com:443"
def test_notebook_service_client_get_transport_class():
@@ -121,6 +132,16 @@ def test_notebook_service_client_get_transport_class():
),
],
)
+@mock.patch.object(
+ NotebookServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(NotebookServiceClient),
+)
+@mock.patch.object(
+ NotebookServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(NotebookServiceAsyncClient),
+)
def test_notebook_service_client_client_options(
client_class, transport_class, transport_name
):
@@ -145,14 +166,14 @@ def test_notebook_service_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -161,14 +182,14 @@ def test_notebook_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -177,90 +198,185 @@ def test_notebook_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (
+ NotebookServiceClient,
+ transports.NotebookServiceGrpcTransport,
+ "grpc",
+ "true",
+ ),
+ (
+ NotebookServiceAsyncClient,
+ transports.NotebookServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (
+ NotebookServiceClient,
+ transports.NotebookServiceGrpcTransport,
+ "grpc",
+ "false",
+ ),
+ (
+ NotebookServiceAsyncClient,
+ transports.NotebookServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ NotebookServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(NotebookServiceClient),
+)
+@mock.patch.object(
+ NotebookServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(NotebookServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_notebook_service_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=client_cert_source_callback,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and default_client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
- with mock.patch.object(transport_class, "__init__") as patched:
+ ssl_channel_creds = mock.Mock()
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=True,
+ "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
):
patched.return_value = None
- client = client_class()
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_ssl_channel_creds = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_ssl_channel_creds = ssl_channel_creds
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
+ host=expected_host,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", but client_cert_source and default_client_cert_source are None.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=False,
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
):
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has
- # unsupported value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError):
- client = client_class()
-
- # Check the case quota_project_id is provided
- options = client_options.ClientOptions(quota_project_id="octopus")
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id="octopus",
- )
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.ssl_credentials",
+ new_callable=mock.PropertyMock,
+ ) as ssl_credentials_mock:
+ if use_client_cert_env == "false":
+ is_mtls_mock.return_value = False
+ ssl_credentials_mock.return_value = None
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_ssl_channel_creds = None
+ else:
+ is_mtls_mock.return_value = True
+ ssl_credentials_mock.return_value = mock.Mock()
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_ssl_channel_creds = (
+ ssl_credentials_mock.return_value
+ )
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ ):
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ is_mtls_mock.return_value = False
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
@pytest.mark.parametrize(
@@ -287,9 +403,9 @@ def test_notebook_service_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -317,9 +433,9 @@ def test_notebook_service_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -336,23 +452,25 @@ def test_notebook_service_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
-def test_list_instances(transport: str = "grpc"):
+def test_list_instances(
+ transport: str = "grpc", request_type=service.ListInstancesRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.ListInstancesRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = service.ListInstancesResponse(
next_page_token="next_page_token_value", unreachable=["unreachable_value"],
@@ -364,9 +482,10 @@ def test_list_instances(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.ListInstancesRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListInstancesPager)
assert response.next_page_token == "next_page_token_value"
@@ -374,20 +493,24 @@ def test_list_instances(transport: str = "grpc"):
assert response.unreachable == ["unreachable_value"]
+def test_list_instances_from_dict():
+ test_list_instances(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_list_instances_async(transport: str = "grpc_asyncio"):
+async def test_list_instances_async(
+ transport: str = "grpc_asyncio", request_type=service.ListInstancesRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.ListInstancesRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_instances), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
service.ListInstancesResponse(
@@ -402,7 +525,7 @@ async def test_list_instances_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.ListInstancesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListInstancesAsyncPager)
@@ -412,6 +535,11 @@ async def test_list_instances_async(transport: str = "grpc_asyncio"):
assert response.unreachable == ["unreachable_value"]
+@pytest.mark.asyncio
+async def test_list_instances_async_from_dict():
+ await test_list_instances_async(request_type=dict)
+
+
def test_list_instances_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -421,7 +549,7 @@ def test_list_instances_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
call.return_value = service.ListInstancesResponse()
client.list_instances(request)
@@ -446,9 +574,7 @@ async def test_list_instances_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_instances), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
service.ListInstancesResponse()
)
@@ -469,7 +595,7 @@ def test_list_instances_pager():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
service.ListInstancesResponse(
@@ -507,7 +633,7 @@ def test_list_instances_pages():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_instances), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
service.ListInstancesResponse(
@@ -528,8 +654,8 @@ def test_list_instances_pages():
RuntimeError,
)
pages = list(client.list_instances(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -538,9 +664,7 @@ async def test_list_instances_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_instances),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -577,9 +701,7 @@ async def test_list_instances_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_instances),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -601,23 +723,23 @@ async def test_list_instances_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.list_instances(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.list_instances(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
-def test_get_instance(transport: str = "grpc"):
+def test_get_instance(transport: str = "grpc", request_type=service.GetInstanceRequest):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.GetInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_instance), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = instance.Instance(
name="name_value",
@@ -649,9 +771,10 @@ def test_get_instance(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.GetInstanceRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, instance.Instance)
assert response.name == "name_value"
@@ -695,20 +818,24 @@ def test_get_instance(transport: str = "grpc"):
assert response.subnet == "subnet_value"
+def test_get_instance_from_dict():
+ test_get_instance(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_get_instance_async(transport: str = "grpc_asyncio"):
+async def test_get_instance_async(
+ transport: str = "grpc_asyncio", request_type=service.GetInstanceRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.GetInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
instance.Instance(
@@ -741,7 +868,7 @@ async def test_get_instance_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.GetInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, instance.Instance)
@@ -787,6 +914,11 @@ async def test_get_instance_async(transport: str = "grpc_asyncio"):
assert response.subnet == "subnet_value"
+@pytest.mark.asyncio
+async def test_get_instance_async_from_dict():
+ await test_get_instance_async(request_type=dict)
+
+
def test_get_instance_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -796,7 +928,7 @@ def test_get_instance_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_instance), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
call.return_value = instance.Instance()
client.get_instance(request)
@@ -821,9 +953,7 @@ async def test_get_instance_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(instance.Instance())
await client.get_instance(request)
@@ -838,17 +968,19 @@ async def test_get_instance_field_headers_async():
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
-def test_create_instance(transport: str = "grpc"):
+def test_create_instance(
+ transport: str = "grpc", request_type=service.CreateInstanceRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.CreateInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_instance), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -858,26 +990,30 @@ def test_create_instance(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.CreateInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_create_instance_from_dict():
+ test_create_instance(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_create_instance_async(transport: str = "grpc_asyncio"):
+async def test_create_instance_async(
+ transport: str = "grpc_asyncio", request_type=service.CreateInstanceRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.CreateInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
@@ -889,12 +1025,17 @@ async def test_create_instance_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.CreateInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_create_instance_async_from_dict():
+ await test_create_instance_async(request_type=dict)
+
+
def test_create_instance_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -904,7 +1045,7 @@ def test_create_instance_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_instance), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.create_instance(request)
@@ -929,9 +1070,7 @@ async def test_create_instance_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
@@ -948,18 +1087,20 @@ async def test_create_instance_field_headers_async():
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
-def test_register_instance(transport: str = "grpc"):
+def test_register_instance(
+ transport: str = "grpc", request_type=service.RegisterInstanceRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.RegisterInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.register_instance), "__call__"
+ type(client.transport.register_instance), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -970,25 +1111,31 @@ def test_register_instance(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.RegisterInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_register_instance_from_dict():
+ test_register_instance(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_register_instance_async(transport: str = "grpc_asyncio"):
+async def test_register_instance_async(
+ transport: str = "grpc_asyncio", request_type=service.RegisterInstanceRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.RegisterInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.register_instance), "__call__"
+ type(client.transport.register_instance), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1001,12 +1148,17 @@ async def test_register_instance_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.RegisterInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_register_instance_async_from_dict():
+ await test_register_instance_async(request_type=dict)
+
+
def test_register_instance_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1017,7 +1169,7 @@ def test_register_instance_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.register_instance), "__call__"
+ type(client.transport.register_instance), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -1044,7 +1196,7 @@ async def test_register_instance_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.register_instance), "__call__"
+ type(client.transport.register_instance), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
@@ -1062,18 +1214,20 @@ async def test_register_instance_field_headers_async():
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
-def test_set_instance_accelerator(transport: str = "grpc"):
+def test_set_instance_accelerator(
+ transport: str = "grpc", request_type=service.SetInstanceAcceleratorRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.SetInstanceAcceleratorRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.set_instance_accelerator), "__call__"
+ type(client.transport.set_instance_accelerator), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -1084,25 +1238,31 @@ def test_set_instance_accelerator(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.SetInstanceAcceleratorRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_set_instance_accelerator_from_dict():
+ test_set_instance_accelerator(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_set_instance_accelerator_async(transport: str = "grpc_asyncio"):
+async def test_set_instance_accelerator_async(
+ transport: str = "grpc_asyncio", request_type=service.SetInstanceAcceleratorRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.SetInstanceAcceleratorRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.set_instance_accelerator), "__call__"
+ type(client.transport.set_instance_accelerator), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1115,12 +1275,17 @@ async def test_set_instance_accelerator_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.SetInstanceAcceleratorRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_set_instance_accelerator_async_from_dict():
+ await test_set_instance_accelerator_async(request_type=dict)
+
+
def test_set_instance_accelerator_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1131,7 +1296,7 @@ def test_set_instance_accelerator_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.set_instance_accelerator), "__call__"
+ type(client.transport.set_instance_accelerator), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -1158,7 +1323,7 @@ async def test_set_instance_accelerator_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.set_instance_accelerator), "__call__"
+ type(client.transport.set_instance_accelerator), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
@@ -1176,18 +1341,20 @@ async def test_set_instance_accelerator_field_headers_async():
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
-def test_set_instance_machine_type(transport: str = "grpc"):
+def test_set_instance_machine_type(
+ transport: str = "grpc", request_type=service.SetInstanceMachineTypeRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.SetInstanceMachineTypeRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.set_instance_machine_type), "__call__"
+ type(client.transport.set_instance_machine_type), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -1198,25 +1365,31 @@ def test_set_instance_machine_type(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.SetInstanceMachineTypeRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_set_instance_machine_type_from_dict():
+ test_set_instance_machine_type(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_set_instance_machine_type_async(transport: str = "grpc_asyncio"):
+async def test_set_instance_machine_type_async(
+ transport: str = "grpc_asyncio", request_type=service.SetInstanceMachineTypeRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.SetInstanceMachineTypeRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.set_instance_machine_type), "__call__"
+ type(client.transport.set_instance_machine_type), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1229,12 +1402,17 @@ async def test_set_instance_machine_type_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.SetInstanceMachineTypeRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_set_instance_machine_type_async_from_dict():
+ await test_set_instance_machine_type_async(request_type=dict)
+
+
def test_set_instance_machine_type_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1245,7 +1423,7 @@ def test_set_instance_machine_type_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.set_instance_machine_type), "__call__"
+ type(client.transport.set_instance_machine_type), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -1272,7 +1450,7 @@ async def test_set_instance_machine_type_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.set_instance_machine_type), "__call__"
+ type(client.transport.set_instance_machine_type), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
@@ -1290,18 +1468,20 @@ async def test_set_instance_machine_type_field_headers_async():
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
-def test_set_instance_labels(transport: str = "grpc"):
+def test_set_instance_labels(
+ transport: str = "grpc", request_type=service.SetInstanceLabelsRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.SetInstanceLabelsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.set_instance_labels), "__call__"
+ type(client.transport.set_instance_labels), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -1312,25 +1492,31 @@ def test_set_instance_labels(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.SetInstanceLabelsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_set_instance_labels_from_dict():
+ test_set_instance_labels(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_set_instance_labels_async(transport: str = "grpc_asyncio"):
+async def test_set_instance_labels_async(
+ transport: str = "grpc_asyncio", request_type=service.SetInstanceLabelsRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.SetInstanceLabelsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.set_instance_labels), "__call__"
+ type(client.transport.set_instance_labels), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1343,12 +1529,17 @@ async def test_set_instance_labels_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.SetInstanceLabelsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_set_instance_labels_async_from_dict():
+ await test_set_instance_labels_async(request_type=dict)
+
+
def test_set_instance_labels_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1359,7 +1550,7 @@ def test_set_instance_labels_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.set_instance_labels), "__call__"
+ type(client.transport.set_instance_labels), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -1386,7 +1577,7 @@ async def test_set_instance_labels_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.set_instance_labels), "__call__"
+ type(client.transport.set_instance_labels), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
@@ -1404,17 +1595,19 @@ async def test_set_instance_labels_field_headers_async():
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
-def test_delete_instance(transport: str = "grpc"):
+def test_delete_instance(
+ transport: str = "grpc", request_type=service.DeleteInstanceRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.DeleteInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_instance), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -1424,26 +1617,30 @@ def test_delete_instance(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.DeleteInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_delete_instance_from_dict():
+ test_delete_instance(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_delete_instance_async(transport: str = "grpc_asyncio"):
+async def test_delete_instance_async(
+ transport: str = "grpc_asyncio", request_type=service.DeleteInstanceRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.DeleteInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
@@ -1455,12 +1652,17 @@ async def test_delete_instance_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.DeleteInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_delete_instance_async_from_dict():
+ await test_delete_instance_async(request_type=dict)
+
+
def test_delete_instance_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1470,7 +1672,7 @@ def test_delete_instance_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_instance), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.delete_instance(request)
@@ -1495,9 +1697,7 @@ async def test_delete_instance_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
@@ -1514,17 +1714,19 @@ async def test_delete_instance_field_headers_async():
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
-def test_start_instance(transport: str = "grpc"):
+def test_start_instance(
+ transport: str = "grpc", request_type=service.StartInstanceRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.StartInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.start_instance), "__call__") as call:
+ with mock.patch.object(type(client.transport.start_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -1534,26 +1736,30 @@ def test_start_instance(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.StartInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_start_instance_from_dict():
+ test_start_instance(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_start_instance_async(transport: str = "grpc_asyncio"):
+async def test_start_instance_async(
+ transport: str = "grpc_asyncio", request_type=service.StartInstanceRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.StartInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.start_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.start_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
@@ -1565,12 +1771,17 @@ async def test_start_instance_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.StartInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_start_instance_async_from_dict():
+ await test_start_instance_async(request_type=dict)
+
+
def test_start_instance_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1580,7 +1791,7 @@ def test_start_instance_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.start_instance), "__call__") as call:
+ with mock.patch.object(type(client.transport.start_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.start_instance(request)
@@ -1605,9 +1816,7 @@ async def test_start_instance_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.start_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.start_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
@@ -1624,17 +1833,19 @@ async def test_start_instance_field_headers_async():
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
-def test_stop_instance(transport: str = "grpc"):
+def test_stop_instance(
+ transport: str = "grpc", request_type=service.StopInstanceRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.StopInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.stop_instance), "__call__") as call:
+ with mock.patch.object(type(client.transport.stop_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -1644,26 +1855,30 @@ def test_stop_instance(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.StopInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_stop_instance_from_dict():
+ test_stop_instance(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_stop_instance_async(transport: str = "grpc_asyncio"):
+async def test_stop_instance_async(
+ transport: str = "grpc_asyncio", request_type=service.StopInstanceRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.StopInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.stop_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.stop_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
@@ -1675,12 +1890,17 @@ async def test_stop_instance_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.StopInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_stop_instance_async_from_dict():
+ await test_stop_instance_async(request_type=dict)
+
+
def test_stop_instance_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1690,7 +1910,7 @@ def test_stop_instance_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.stop_instance), "__call__") as call:
+ with mock.patch.object(type(client.transport.stop_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.stop_instance(request)
@@ -1715,9 +1935,7 @@ async def test_stop_instance_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.stop_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.stop_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
@@ -1734,17 +1952,19 @@ async def test_stop_instance_field_headers_async():
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
-def test_reset_instance(transport: str = "grpc"):
+def test_reset_instance(
+ transport: str = "grpc", request_type=service.ResetInstanceRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.ResetInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.reset_instance), "__call__") as call:
+ with mock.patch.object(type(client.transport.reset_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -1754,26 +1974,30 @@ def test_reset_instance(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.ResetInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_reset_instance_from_dict():
+ test_reset_instance(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_reset_instance_async(transport: str = "grpc_asyncio"):
+async def test_reset_instance_async(
+ transport: str = "grpc_asyncio", request_type=service.ResetInstanceRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.ResetInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.reset_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.reset_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
@@ -1785,12 +2009,17 @@ async def test_reset_instance_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.ResetInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_reset_instance_async_from_dict():
+ await test_reset_instance_async(request_type=dict)
+
+
def test_reset_instance_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1800,7 +2029,7 @@ def test_reset_instance_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.reset_instance), "__call__") as call:
+ with mock.patch.object(type(client.transport.reset_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.reset_instance(request)
@@ -1825,9 +2054,7 @@ async def test_reset_instance_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.reset_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.reset_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
@@ -1844,18 +2071,20 @@ async def test_reset_instance_field_headers_async():
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
-def test_report_instance_info(transport: str = "grpc"):
+def test_report_instance_info(
+ transport: str = "grpc", request_type=service.ReportInstanceInfoRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.ReportInstanceInfoRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.report_instance_info), "__call__"
+ type(client.transport.report_instance_info), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -1866,25 +2095,31 @@ def test_report_instance_info(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.ReportInstanceInfoRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_report_instance_info_from_dict():
+ test_report_instance_info(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_report_instance_info_async(transport: str = "grpc_asyncio"):
+async def test_report_instance_info_async(
+ transport: str = "grpc_asyncio", request_type=service.ReportInstanceInfoRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.ReportInstanceInfoRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.report_instance_info), "__call__"
+ type(client.transport.report_instance_info), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1897,12 +2132,17 @@ async def test_report_instance_info_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.ReportInstanceInfoRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_report_instance_info_async_from_dict():
+ await test_report_instance_info_async(request_type=dict)
+
+
def test_report_instance_info_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1913,7 +2153,7 @@ def test_report_instance_info_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.report_instance_info), "__call__"
+ type(client.transport.report_instance_info), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -1940,7 +2180,7 @@ async def test_report_instance_info_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.report_instance_info), "__call__"
+ type(client.transport.report_instance_info), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
@@ -1958,22 +2198,26 @@ async def test_report_instance_info_field_headers_async():
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
-def test_is_instance_upgradeable(transport: str = "grpc"):
+def test_is_instance_upgradeable(
+ transport: str = "grpc", request_type=service.IsInstanceUpgradeableRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.IsInstanceUpgradeableRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.is_instance_upgradeable), "__call__"
+ type(client.transport.is_instance_upgradeable), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = service.IsInstanceUpgradeableResponse(
- upgradeable=True, upgrade_version="upgrade_version_value",
+ upgradeable=True,
+ upgrade_version="upgrade_version_value",
+ upgrade_info="upgrade_info_value",
)
response = client.is_instance_upgradeable(request)
@@ -1982,34 +2226,45 @@ def test_is_instance_upgradeable(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.IsInstanceUpgradeableRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, service.IsInstanceUpgradeableResponse)
assert response.upgradeable is True
assert response.upgrade_version == "upgrade_version_value"
+ assert response.upgrade_info == "upgrade_info_value"
+
+
+def test_is_instance_upgradeable_from_dict():
+ test_is_instance_upgradeable(request_type=dict)
+
@pytest.mark.asyncio
-async def test_is_instance_upgradeable_async(transport: str = "grpc_asyncio"):
+async def test_is_instance_upgradeable_async(
+ transport: str = "grpc_asyncio", request_type=service.IsInstanceUpgradeableRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.IsInstanceUpgradeableRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.is_instance_upgradeable), "__call__"
+ type(client.transport.is_instance_upgradeable), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
service.IsInstanceUpgradeableResponse(
- upgradeable=True, upgrade_version="upgrade_version_value",
+ upgradeable=True,
+ upgrade_version="upgrade_version_value",
+ upgrade_info="upgrade_info_value",
)
)
@@ -2019,7 +2274,7 @@ async def test_is_instance_upgradeable_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.IsInstanceUpgradeableRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, service.IsInstanceUpgradeableResponse)
@@ -2028,6 +2283,13 @@ async def test_is_instance_upgradeable_async(transport: str = "grpc_asyncio"):
assert response.upgrade_version == "upgrade_version_value"
+ assert response.upgrade_info == "upgrade_info_value"
+
+
+@pytest.mark.asyncio
+async def test_is_instance_upgradeable_async_from_dict():
+ await test_is_instance_upgradeable_async(request_type=dict)
+
def test_is_instance_upgradeable_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2039,7 +2301,7 @@ def test_is_instance_upgradeable_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.is_instance_upgradeable), "__call__"
+ type(client.transport.is_instance_upgradeable), "__call__"
) as call:
call.return_value = service.IsInstanceUpgradeableResponse()
@@ -2069,7 +2331,7 @@ async def test_is_instance_upgradeable_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.is_instance_upgradeable), "__call__"
+ type(client.transport.is_instance_upgradeable), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
service.IsInstanceUpgradeableResponse()
@@ -2090,19 +2352,19 @@ async def test_is_instance_upgradeable_field_headers_async():
) in kw["metadata"]
-def test_upgrade_instance(transport: str = "grpc"):
+def test_upgrade_instance(
+ transport: str = "grpc", request_type=service.UpgradeInstanceRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.UpgradeInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.upgrade_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -2112,26 +2374,30 @@ def test_upgrade_instance(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.UpgradeInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_upgrade_instance_from_dict():
+ test_upgrade_instance(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_upgrade_instance_async(transport: str = "grpc_asyncio"):
+async def test_upgrade_instance_async(
+ transport: str = "grpc_asyncio", request_type=service.UpgradeInstanceRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.UpgradeInstanceRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.upgrade_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
@@ -2143,12 +2409,17 @@ async def test_upgrade_instance_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.UpgradeInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_upgrade_instance_async_from_dict():
+ await test_upgrade_instance_async(request_type=dict)
+
+
def test_upgrade_instance_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2158,9 +2429,7 @@ def test_upgrade_instance_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.upgrade_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.upgrade_instance(request)
@@ -2185,9 +2454,7 @@ async def test_upgrade_instance_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.upgrade_instance), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
@@ -2204,18 +2471,20 @@ async def test_upgrade_instance_field_headers_async():
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
-def test_upgrade_instance_internal(transport: str = "grpc"):
+def test_upgrade_instance_internal(
+ transport: str = "grpc", request_type=service.UpgradeInstanceInternalRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.UpgradeInstanceInternalRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.upgrade_instance_internal), "__call__"
+ type(client.transport.upgrade_instance_internal), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -2226,25 +2495,31 @@ def test_upgrade_instance_internal(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.UpgradeInstanceInternalRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_upgrade_instance_internal_from_dict():
+ test_upgrade_instance_internal(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_upgrade_instance_internal_async(transport: str = "grpc_asyncio"):
+async def test_upgrade_instance_internal_async(
+ transport: str = "grpc_asyncio", request_type=service.UpgradeInstanceInternalRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.UpgradeInstanceInternalRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.upgrade_instance_internal), "__call__"
+ type(client.transport.upgrade_instance_internal), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -2257,12 +2532,17 @@ async def test_upgrade_instance_internal_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.UpgradeInstanceInternalRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_upgrade_instance_internal_async_from_dict():
+ await test_upgrade_instance_internal_async(request_type=dict)
+
+
def test_upgrade_instance_internal_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2273,7 +2553,7 @@ def test_upgrade_instance_internal_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.upgrade_instance_internal), "__call__"
+ type(client.transport.upgrade_instance_internal), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -2300,7 +2580,7 @@ async def test_upgrade_instance_internal_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.upgrade_instance_internal), "__call__"
+ type(client.transport.upgrade_instance_internal), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
@@ -2318,18 +2598,20 @@ async def test_upgrade_instance_internal_field_headers_async():
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
-def test_list_environments(transport: str = "grpc"):
+def test_list_environments(
+ transport: str = "grpc", request_type=service.ListEnvironmentsRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.ListEnvironmentsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_environments), "__call__"
+ type(client.transport.list_environments), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = service.ListEnvironmentsResponse(
@@ -2342,9 +2624,10 @@ def test_list_environments(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.ListEnvironmentsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListEnvironmentsPager)
assert response.next_page_token == "next_page_token_value"
@@ -2352,19 +2635,25 @@ def test_list_environments(transport: str = "grpc"):
assert response.unreachable == ["unreachable_value"]
+def test_list_environments_from_dict():
+ test_list_environments(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_list_environments_async(transport: str = "grpc_asyncio"):
+async def test_list_environments_async(
+ transport: str = "grpc_asyncio", request_type=service.ListEnvironmentsRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.ListEnvironmentsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_environments), "__call__"
+ type(client.transport.list_environments), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -2380,7 +2669,7 @@ async def test_list_environments_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.ListEnvironmentsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListEnvironmentsAsyncPager)
@@ -2390,6 +2679,11 @@ async def test_list_environments_async(transport: str = "grpc_asyncio"):
assert response.unreachable == ["unreachable_value"]
+@pytest.mark.asyncio
+async def test_list_environments_async_from_dict():
+ await test_list_environments_async(request_type=dict)
+
+
def test_list_environments_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2400,7 +2694,7 @@ def test_list_environments_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_environments), "__call__"
+ type(client.transport.list_environments), "__call__"
) as call:
call.return_value = service.ListEnvironmentsResponse()
@@ -2427,7 +2721,7 @@ async def test_list_environments_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_environments), "__call__"
+ type(client.transport.list_environments), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
service.ListEnvironmentsResponse()
@@ -2450,7 +2744,7 @@ def test_list_environments_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_environments), "__call__"
+ type(client.transport.list_environments), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -2490,7 +2784,7 @@ def test_list_environments_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_environments), "__call__"
+ type(client.transport.list_environments), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -2512,8 +2806,8 @@ def test_list_environments_pages():
RuntimeError,
)
pages = list(client.list_environments(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -2522,7 +2816,7 @@ async def test_list_environments_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_environments),
+ type(client.transport.list_environments),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -2561,7 +2855,7 @@ async def test_list_environments_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_environments),
+ type(client.transport.list_environments),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -2585,23 +2879,25 @@ async def test_list_environments_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.list_environments(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.list_environments(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
-def test_get_environment(transport: str = "grpc"):
+def test_get_environment(
+ transport: str = "grpc", request_type=service.GetEnvironmentRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.GetEnvironmentRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_environment), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_environment), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = environment.Environment(
name="name_value",
@@ -2617,9 +2913,10 @@ def test_get_environment(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.GetEnvironmentRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, environment.Environment)
assert response.name == "name_value"
@@ -2631,20 +2928,24 @@ def test_get_environment(transport: str = "grpc"):
assert response.post_startup_script == "post_startup_script_value"
+def test_get_environment_from_dict():
+ test_get_environment(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_get_environment_async(transport: str = "grpc_asyncio"):
+async def test_get_environment_async(
+ transport: str = "grpc_asyncio", request_type=service.GetEnvironmentRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.GetEnvironmentRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_environment), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_environment), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
environment.Environment(
@@ -2661,7 +2962,7 @@ async def test_get_environment_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.GetEnvironmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, environment.Environment)
@@ -2675,6 +2976,11 @@ async def test_get_environment_async(transport: str = "grpc_asyncio"):
assert response.post_startup_script == "post_startup_script_value"
+@pytest.mark.asyncio
+async def test_get_environment_async_from_dict():
+ await test_get_environment_async(request_type=dict)
+
+
def test_get_environment_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2684,7 +2990,7 @@ def test_get_environment_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_environment), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_environment), "__call__") as call:
call.return_value = environment.Environment()
client.get_environment(request)
@@ -2709,9 +3015,7 @@ async def test_get_environment_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_environment), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_environment), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
environment.Environment()
)
@@ -2728,18 +3032,20 @@ async def test_get_environment_field_headers_async():
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
-def test_create_environment(transport: str = "grpc"):
+def test_create_environment(
+ transport: str = "grpc", request_type=service.CreateEnvironmentRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.CreateEnvironmentRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_environment), "__call__"
+ type(client.transport.create_environment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -2750,25 +3056,31 @@ def test_create_environment(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.CreateEnvironmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_create_environment_from_dict():
+ test_create_environment(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_create_environment_async(transport: str = "grpc_asyncio"):
+async def test_create_environment_async(
+ transport: str = "grpc_asyncio", request_type=service.CreateEnvironmentRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.CreateEnvironmentRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_environment), "__call__"
+ type(client.transport.create_environment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -2781,12 +3093,17 @@ async def test_create_environment_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.CreateEnvironmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_create_environment_async_from_dict():
+ await test_create_environment_async(request_type=dict)
+
+
def test_create_environment_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2797,7 +3114,7 @@ def test_create_environment_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_environment), "__call__"
+ type(client.transport.create_environment), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -2824,7 +3141,7 @@ async def test_create_environment_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_environment), "__call__"
+ type(client.transport.create_environment), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
@@ -2842,18 +3159,20 @@ async def test_create_environment_field_headers_async():
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
-def test_delete_environment(transport: str = "grpc"):
+def test_delete_environment(
+ transport: str = "grpc", request_type=service.DeleteEnvironmentRequest
+):
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.DeleteEnvironmentRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_environment), "__call__"
+ type(client.transport.delete_environment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
@@ -2864,25 +3183,31 @@ def test_delete_environment(transport: str = "grpc"):
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.DeleteEnvironmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+def test_delete_environment_from_dict():
+ test_delete_environment(request_type=dict)
+
+
@pytest.mark.asyncio
-async def test_delete_environment_async(transport: str = "grpc_asyncio"):
+async def test_delete_environment_async(
+ transport: str = "grpc_asyncio", request_type=service.DeleteEnvironmentRequest
+):
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = service.DeleteEnvironmentRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_environment), "__call__"
+ type(client.transport.delete_environment), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -2895,12 +3220,17 @@ async def test_delete_environment_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == service.DeleteEnvironmentRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
+@pytest.mark.asyncio
+async def test_delete_environment_async_from_dict():
+ await test_delete_environment_async(request_type=dict)
+
+
def test_delete_environment_field_headers():
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2911,7 +3241,7 @@ def test_delete_environment_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_environment), "__call__"
+ type(client.transport.delete_environment), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
@@ -2938,7 +3268,7 @@ async def test_delete_environment_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_environment), "__call__"
+ type(client.transport.delete_environment), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
@@ -2992,7 +3322,7 @@ def test_transport_instance():
credentials=credentials.AnonymousCredentials(),
)
client = NotebookServiceClient(transport=transport)
- assert client._transport is transport
+ assert client.transport is transport
def test_transport_get_channel():
@@ -3010,10 +3340,25 @@ def test_transport_get_channel():
assert channel
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.NotebookServiceGrpcTransport,
+ transports.NotebookServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = NotebookServiceClient(credentials=credentials.AnonymousCredentials(),)
- assert isinstance(client._transport, transports.NotebookServiceGrpcTransport,)
+ assert isinstance(client.transport, transports.NotebookServiceGrpcTransport,)
def test_notebook_service_base_transport_error():
@@ -3027,9 +3372,13 @@ def test_notebook_service_base_transport_error():
def test_notebook_service_base_transport():
# Instantiate the base transport.
- transport = transports.NotebookServiceTransport(
- credentials=credentials.AnonymousCredentials(),
- )
+ with mock.patch(
+ "google.cloud.notebooks_v1beta1.services.notebook_service.transports.NotebookServiceTransport.__init__"
+ ) as Transport:
+ Transport.return_value = None
+ transport = transports.NotebookServiceTransport(
+ credentials=credentials.AnonymousCredentials(),
+ )
# Every method on the transport should just blindly
# raise NotImplementedError.
@@ -3066,7 +3415,12 @@ def test_notebook_service_base_transport():
def test_notebook_service_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
- with mock.patch.object(auth, "load_credentials_from_file") as load_creds:
+ with mock.patch.object(
+ auth, "load_credentials_from_file"
+ ) as load_creds, mock.patch(
+ "google.cloud.notebooks_v1beta1.services.notebook_service.transports.NotebookServiceTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
load_creds.return_value = (credentials.AnonymousCredentials(), None)
transport = transports.NotebookServiceTransport(
credentials_file="credentials.json", quota_project_id="octopus",
@@ -3078,6 +3432,17 @@ def test_notebook_service_base_transport_with_credentials_file():
)
+def test_notebook_service_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.notebooks_v1beta1.services.notebook_service.transports.NotebookServiceTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.NotebookServiceTransport()
+ adc.assert_called_once()
+
+
def test_notebook_service_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(auth, "default") as adc:
@@ -3110,7 +3475,7 @@ def test_notebook_service_host_no_port():
api_endpoint="notebooks.googleapis.com"
),
)
- assert client._transport._host == "notebooks.googleapis.com:443"
+ assert client.transport._host == "notebooks.googleapis.com:443"
def test_notebook_service_host_with_port():
@@ -3120,192 +3485,126 @@ def test_notebook_service_host_with_port():
api_endpoint="notebooks.googleapis.com:8000"
),
)
- assert client._transport._host == "notebooks.googleapis.com:8000"
+ assert client.transport._host == "notebooks.googleapis.com:8000"
def test_notebook_service_grpc_transport_channel():
channel = grpc.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.NotebookServiceGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
+ assert transport._ssl_channel_credentials is None
def test_notebook_service_grpc_asyncio_transport_channel():
channel = aio.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.NotebookServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_notebook_service_grpc_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
-
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- transport = transports.NotebookServiceGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_notebook_service_grpc_asyncio_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
-
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- transport = transports.NotebookServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials is None
@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+ "transport_class",
+ [
+ transports.NotebookServiceGrpcTransport,
+ transports.NotebookServiceGrpcAsyncIOTransport,
+ ],
)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_notebook_service_grpc_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
+def test_notebook_service_transport_channel_mtls_with_client_cert_source(
+ transport_class,
):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- # Mock google.auth.transport.grpc.SslCredentials class.
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- mock_cred = mock.Mock()
- transport = transports.NotebookServiceGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+ "transport_class",
+ [
+ transports.NotebookServiceGrpcTransport,
+ transports.NotebookServiceGrpcAsyncIOTransport,
+ ],
)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_notebook_service_grpc_asyncio_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
-):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- # Mock google.auth.transport.grpc.SslCredentials class.
+def test_notebook_service_transport_channel_mtls_with_adc(transport_class):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
- mock_cred = mock.Mock()
- transport = transports.NotebookServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
def test_notebook_service_grpc_lro_client():
client = NotebookServiceClient(
credentials=credentials.AnonymousCredentials(), transport="grpc",
)
- transport = client._transport
+ transport = client.transport
# Ensure that we have a api-core operations client.
assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
@@ -3318,7 +3617,7 @@ def test_notebook_service_grpc_lro_async_client():
client = NotebookServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport="grpc_asyncio",
)
- transport = client._client._transport
+ transport = client.transport
# Ensure that we have a api-core operations client.
assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
@@ -3351,8 +3650,8 @@ def test_parse_environment_path():
def test_instance_path():
- project = "squid"
- instance = "clam"
+ project = "oyster"
+ instance = "nudibranch"
expected = "projects/{project}/instances/{instance}".format(
project=project, instance=instance,
@@ -3363,11 +3662,133 @@ def test_instance_path():
def test_parse_instance_path():
expected = {
- "project": "whelk",
- "instance": "octopus",
+ "project": "cuttlefish",
+ "instance": "mussel",
}
path = NotebookServiceClient.instance_path(**expected)
# Check that the path construction is reversible.
actual = NotebookServiceClient.parse_instance_path(path)
assert expected == actual
+
+
+def test_common_billing_account_path():
+ billing_account = "winkle"
+
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+ actual = NotebookServiceClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "nautilus",
+ }
+ path = NotebookServiceClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = NotebookServiceClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "scallop"
+
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = NotebookServiceClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "abalone",
+ }
+ path = NotebookServiceClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = NotebookServiceClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "squid"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = NotebookServiceClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "clam",
+ }
+ path = NotebookServiceClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = NotebookServiceClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "whelk"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = NotebookServiceClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "octopus",
+ }
+ path = NotebookServiceClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = NotebookServiceClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "oyster"
+ location = "nudibranch"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+ actual = NotebookServiceClient.common_location_path(project, location)
+ assert expected == actual
+
+
+def test_parse_common_location_path():
+ expected = {
+ "project": "cuttlefish",
+ "location": "mussel",
+ }
+ path = NotebookServiceClient.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = NotebookServiceClient.parse_common_location_path(path)
+ assert expected == actual
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.NotebookServiceTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = NotebookServiceClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.NotebookServiceTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = NotebookServiceClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)