diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..8d13724
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,18 @@
+[run]
+branch = True
+
+[report]
+fail_under = 100
+show_missing = True
+omit =
+ google/cloud/assuredworkloads/__init__.py
+exclude_lines =
+ # Re-enable the standard pragma
+ pragma: NO COVER
+ # Ignore debug-only repr
+ def __repr__
+ # Ignore pkg_resources exceptions.
+ # This is added at the module level as a safeguard for if someone
+ # generates the code and tries to run it without pip installing. This
+ # makes it virtually impossible to test properly.
+ except pkg_resources.DistributionNotFound
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
index f0325e8..c4121bb 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.md
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -20,7 +20,7 @@ If you are still having issues, please be sure to include as much information as
- OS type and version:
- Python version: `python --version`
- pip version: `pip --version`
- - `google-cloud-assured-workflows` version: `pip show google-cloud-assured-workflows`
+ - `google-cloud-assured-workloads` version: `pip show google-cloud-assured-workloads`
#### Steps to reproduce
diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml
new file mode 100644
index 0000000..fc281c0
--- /dev/null
+++ b/.github/header-checker-lint.yml
@@ -0,0 +1,15 @@
+{"allowedCopyrightHolders": ["Google LLC"],
+ "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"],
+ "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"],
+ "sourceFileExtensions": [
+ "ts",
+ "js",
+ "java",
+ "sh",
+ "Dockerfile",
+ "yaml",
+ "py",
+ "html",
+ "txt"
+ ]
+}
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index b9daa52..b4243ce 100644
--- a/.gitignore
+++ b/.gitignore
@@ -50,8 +50,10 @@ docs.metadata
# Virtual environment
env/
+
+# Test logs
coverage.xml
-sponge_log.xml
+*sponge_log.xml
# System test environment variables.
system_tests/local_test_setup
diff --git a/.kokoro/build.sh b/.kokoro/build.sh
index 47bb203..5a1825c 100755
--- a/.kokoro/build.sh
+++ b/.kokoro/build.sh
@@ -15,7 +15,11 @@
set -eo pipefail
-cd github/python-assured-workloads
+if [[ -z "${PROJECT_ROOT:-}" ]]; then
+ PROJECT_ROOT="github/python-assured-workloads"
+fi
+
+cd "${PROJECT_ROOT}"
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json
export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json")
# Remove old nox
-python3.6 -m pip uninstall --yes --quiet nox-automation
+python3 -m pip uninstall --yes --quiet nox-automation
# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-python3.6 -m nox --version
+python3 -m pip install --upgrade --quiet nox
+python3 -m nox --version
+
+# If this is a continuous build, send the test log to the FlakyBot.
+# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then
+ cleanup() {
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ }
+ trap cleanup EXIT HUP
+fi
# If NOX_SESSION is set, it only runs the specified session,
# otherwise run all the sessions.
if [[ -n "${NOX_SESSION:-}" ]]; then
- python3.6 -m nox -s "${NOX_SESSION:-}"
+ python3 -m nox -s ${NOX_SESSION:-}
else
- python3.6 -m nox
+ python3 -m nox
fi
diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg
index 1118107..959a122 100644
--- a/.kokoro/docs/docs-presubmit.cfg
+++ b/.kokoro/docs/docs-presubmit.cfg
@@ -15,3 +15,14 @@ env_vars: {
key: "TRAMPOLINE_IMAGE_UPLOAD"
value: "false"
}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-assured-workloads/.kokoro/build.sh"
+}
+
+# Only run this nox session.
+env_vars: {
+ key: "NOX_SESSION"
+ value: "docs docfx"
+}
diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.6/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-assured-workloads/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.7/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-assured-workloads/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.8/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-assured-workloads/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh
new file mode 100755
index 0000000..7b6d13d
--- /dev/null
+++ b/.kokoro/test-samples-against-head.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# A customized test runner for samples.
+#
+# For periodic builds, you can specify this file for testing against head.
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+cd github/python-assured-workloads
+
+exec .kokoro/test-samples-impl.sh
diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh
new file mode 100755
index 0000000..cf5de74
--- /dev/null
+++ b/.kokoro/test-samples-impl.sh
@@ -0,0 +1,102 @@
+#!/bin/bash
+# Copyright 2021 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# `-e` enables the script to automatically fail when a command fails
+# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero
+set -eo pipefail
+# Enables `**` to include files nested inside sub-folders
+shopt -s globstar
+
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+ echo "No tests run. './samples' not found"
+ exit 0
+fi
+
+# Disable buffering, so that the logs stream through.
+export PYTHONUNBUFFERED=1
+
+# Debug: show build environment
+env | grep KOKORO
+
+# Install nox
+python3.6 -m pip install --upgrade --quiet nox
+
+# Use secrets accessor service account to get secrets
+if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
+ gcloud auth activate-service-account \
+ --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
+ --project="cloud-devrel-kokoro-resources"
+fi
+
+# This script will create 3 files:
+# - testing/test-env.sh
+# - testing/service-account.json
+# - testing/client-secrets.json
+./scripts/decrypt-secrets.sh
+
+source ./testing/test-env.sh
+export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
+
+# For cloud-run session, we activate the service account for gcloud sdk.
+gcloud auth activate-service-account \
+ --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
+
+export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
+
+echo -e "\n******************** TESTING PROJECTS ********************"
+
+# Switch to 'fail at end' to allow all tests to complete before exiting.
+set +e
+# Use RTN to return a non-zero value if the test fails.
+RTN=0
+ROOT=$(pwd)
+# Find all requirements.txt in the samples directory (may break on whitespace).
+for file in samples/**/requirements.txt; do
+ cd "$ROOT"
+ # Navigate to the project folder.
+ file=$(dirname "$file")
+ cd "$file"
+
+ echo "------------------------------------------------------------"
+ echo "- testing $file"
+ echo "------------------------------------------------------------"
+
+ # Use nox to execute the tests for the project.
+ python3.6 -m nox -s "$RUN_TESTS_SESSION"
+ EXIT=$?
+
+ # If this is a periodic build, send the test log to the FlakyBot.
+ # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ fi
+
+ if [[ $EXIT -ne 0 ]]; then
+ RTN=1
+ echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
+ else
+ echo -e "\n Testing completed.\n"
+ fi
+
+done
+cd "$ROOT"
+
+# Workaround for Kokoro permissions issue: delete secrets
+rm testing/{test-env.sh,client-secrets.json,service-account.json}
+
+exit "$RTN"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index f248af5..52a44ca 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -13,6 +13,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+# The default test runner for samples.
+#
+# For periodic builds, we rewind the repo to the latest release, and
+# run test-samples-impl.sh.
# `-e` enables the script to automatically fail when a command fails
# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero
@@ -24,87 +28,19 @@ cd github/python-assured-workloads
# Run periodic samples tests at latest release
if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
+ # preserving the test runner implementation.
+ cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh"
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ echo "Now we rewind the repo back to the latest release..."
LATEST_RELEASE=$(git describe --abbrev=0 --tags)
git checkout $LATEST_RELEASE
-fi
-
-# Exit early if samples directory doesn't exist
-if [ ! -d "./samples" ]; then
- echo "No tests run. `./samples` not found"
- exit 0
-fi
-
-# Disable buffering, so that the logs stream through.
-export PYTHONUNBUFFERED=1
-
-# Debug: show build environment
-env | grep KOKORO
-
-# Install nox
-python3.6 -m pip install --upgrade --quiet nox
-
-# Use secrets acessor service account to get secrets
-if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then
- gcloud auth activate-service-account \
- --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \
- --project="cloud-devrel-kokoro-resources"
-fi
-
-# This script will create 3 files:
-# - testing/test-env.sh
-# - testing/service-account.json
-# - testing/client-secrets.json
-./scripts/decrypt-secrets.sh
-
-source ./testing/test-env.sh
-export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json
-
-# For cloud-run session, we activate the service account for gcloud sdk.
-gcloud auth activate-service-account \
- --key-file "${GOOGLE_APPLICATION_CREDENTIALS}"
-
-export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json
-
-echo -e "\n******************** TESTING PROJECTS ********************"
-
-# Switch to 'fail at end' to allow all tests to complete before exiting.
-set +e
-# Use RTN to return a non-zero value if the test fails.
-RTN=0
-ROOT=$(pwd)
-# Find all requirements.txt in the samples directory (may break on whitespace).
-for file in samples/**/requirements.txt; do
- cd "$ROOT"
- # Navigate to the project folder.
- file=$(dirname "$file")
- cd "$file"
-
- echo "------------------------------------------------------------"
- echo "- testing $file"
- echo "------------------------------------------------------------"
-
- # Use nox to execute the tests for the project.
- python3.6 -m nox -s "$RUN_TESTS_SESSION"
- EXIT=$?
-
- # If this is a periodic build, send the test log to the FlakyBot.
- # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot.
- if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
- chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot
- $KOKORO_GFILE_DIR/linux_amd64/flakybot
+ echo "The current head is: "
+ echo $(git rev-parse --verify HEAD)
+ echo "--- IMPORTANT IMPORTANT IMPORTANT ---"
+ # move back the test runner implementation if there's no file.
+ if [ ! -f .kokoro/test-samples-impl.sh ]; then
+ cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh
fi
+fi
- if [[ $EXIT -ne 0 ]]; then
- RTN=1
- echo -e "\n Testing failed: Nox returned a non-zero exit code. \n"
- else
- echo -e "\n Testing completed.\n"
- fi
-
-done
-cd "$ROOT"
-
-# Workaround for Kokoro permissions issue: delete secrets
-rm testing/{test-env.sh,client-secrets.json,service-account.json}
-
-exit "$RTN"
+exec .kokoro/test-samples-impl.sh
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index a9024b1..32302e4 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -12,6 +12,6 @@ repos:
hooks:
- id: black
- repo: https://gitlab.com/pycqa/flake8
- rev: 3.8.4
+ rev: 3.9.0
hooks:
- id: flake8
diff --git a/.trampolinerc b/.trampolinerc
index 995ee29..383b6ec 100644
--- a/.trampolinerc
+++ b/.trampolinerc
@@ -24,6 +24,7 @@ required_envvars+=(
pass_down_envvars+=(
"STAGING_BUCKET"
"V2_STAGING_BUCKET"
+ "NOX_SESSION"
)
# Prevent unintentional override on the default image.
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index f431389..2ce45f4 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests.
- To test your changes, run unit tests with ``nox``::
$ nox -s unit-2.7
- $ nox -s unit-3.7
+ $ nox -s unit-3.8
$ ...
+- Args to pytest can be passed through the nox command separated by a `--`. For
+ example, to run a single test::
+
+ $ nox -s unit-3.8 -- -k
+
.. note::
The unit tests and system tests are described in the
@@ -93,8 +98,12 @@ On Debian/Ubuntu::
************
Coding Style
************
+- We use the automatic code formatter ``black``. You can run it using
+ the nox session ``blacken``. This will eliminate many lint errors. Run via::
+
+ $ nox -s blacken
-- PEP8 compliance, with exceptions defined in the linter configuration.
+- PEP8 compliance is required, with exceptions defined in the linter configuration.
If you have ``nox`` installed, you can test that you have not introduced
any non-compliant code via::
@@ -133,13 +142,18 @@ Running System Tests
- To run system tests, you can execute::
- $ nox -s system-3.7
+ # Run all system tests
+ $ nox -s system-3.8
$ nox -s system-2.7
+ # Run a single system test
+ $ nox -s system-3.8 -- -k
+
+
.. note::
System tests are only configured to run under Python 2.7 and
- Python 3.7. For expediency, we do not run them in older versions
+ Python 3.8. For expediency, we do not run them in older versions
of Python 3.
This alone will not run the tests. You'll need to change some local
@@ -193,7 +207,7 @@ instead of
``https://github.com/googleapis/python-assured-workloads/blob/master/CONTRIBUTING.rst``)
may cause problems creating links or rendering the description.
-.. _description on PyPI: https://pypi.org/project/google-cloud-assured-workflows
+.. _description on PyPI: https://pypi.org/project/google-cloud-assured-workloads
*************************
diff --git a/LICENSE b/LICENSE
index a8ee855..d645695 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,7 @@
- Apache License
+
+ Apache License
Version 2.0, January 2004
- https://www.apache.org/licenses/
+ http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
@@ -192,7 +193,7 @@
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
- https://www.apache.org/licenses/LICENSE-2.0
+ http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
diff --git a/MANIFEST.in b/MANIFEST.in
index e9e29d1..e783f4c 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -16,10 +16,10 @@
# Generated by synthtool. DO NOT EDIT!
include README.rst LICENSE
-recursive-include google *.json *.proto
+recursive-include google *.json *.proto py.typed
recursive-include tests *
global-exclude *.py[co]
global-exclude __pycache__
# Exclude scripts for samples readmegen
-prune scripts/readme-gen
\ No newline at end of file
+prune scripts/readme-gen
diff --git a/docs/_static/custom.css b/docs/_static/custom.css
index 0abaf22..bcd37bb 100644
--- a/docs/_static/custom.css
+++ b/docs/_static/custom.css
@@ -1,4 +1,9 @@
div#python2-eol {
border-color: red;
border-width: medium;
-}
\ No newline at end of file
+}
+
+/* Ensure minimum width for 'Parameters' / 'Returns' column */
+dl.field-list > dt {
+ min-width: 100px
+}
diff --git a/docs/assuredworkloads_v1beta1/assured_workloads_service.rst b/docs/assuredworkloads_v1beta1/assured_workloads_service.rst
new file mode 100644
index 0000000..9291a34
--- /dev/null
+++ b/docs/assuredworkloads_v1beta1/assured_workloads_service.rst
@@ -0,0 +1,11 @@
+AssuredWorkloadsService
+-----------------------------------------
+
+.. automodule:: google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service
+ :members:
+ :inherited-members:
+
+
+.. automodule:: google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.pagers
+ :members:
+ :inherited-members:
diff --git a/docs/assuredworkloads_v1beta1/services.rst b/docs/assuredworkloads_v1beta1/services.rst
index 584cf0a..abfac62 100644
--- a/docs/assuredworkloads_v1beta1/services.rst
+++ b/docs/assuredworkloads_v1beta1/services.rst
@@ -1,6 +1,6 @@
Services for Google Cloud Assuredworkloads v1beta1 API
======================================================
+.. toctree::
+ :maxdepth: 2
-.. automodule:: google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service
- :members:
- :inherited-members:
+ assured_workloads_service
diff --git a/docs/assuredworkloads_v1beta1/types.rst b/docs/assuredworkloads_v1beta1/types.rst
index 070395b..59990d5 100644
--- a/docs/assuredworkloads_v1beta1/types.rst
+++ b/docs/assuredworkloads_v1beta1/types.rst
@@ -3,4 +3,5 @@ Types for Google Cloud Assuredworkloads v1beta1 API
.. automodule:: google.cloud.assuredworkloads_v1beta1.types
:members:
+ :undoc-members:
:show-inheritance:
diff --git a/docs/conf.py b/docs/conf.py
index 7444fdd..b23c04e 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
#
-# google-cloud-assured-workflows documentation build configuration file
+# google-cloud-assured-workloads documentation build configuration file
#
# This file is execfile()d with the current directory set to its
# containing dir.
@@ -67,7 +67,7 @@
master_doc = "index"
# General information about the project.
-project = u"google-cloud-assured-workflows"
+project = u"google-cloud-assured-workloads"
copyright = u"2019, Google"
author = u"Google APIs"
@@ -140,7 +140,7 @@
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
- "description": "Google Cloud Client Libraries for google-cloud-assured-workflows",
+ "description": "Google Cloud Client Libraries for google-cloud-assured-workloads",
"github_user": "googleapis",
"github_repo": "python-assured-workloads",
"github_banner": True,
@@ -234,7 +234,7 @@
# html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
-htmlhelp_basename = "google-cloud-assured-workflows-doc"
+htmlhelp_basename = "google-cloud-assured-workloads-doc"
# -- Options for warnings ------------------------------------------------------
@@ -267,8 +267,8 @@
latex_documents = [
(
master_doc,
- "google-cloud-assured-workflows.tex",
- u"google-cloud-assured-workflows Documentation",
+ "google-cloud-assured-workloads.tex",
+ u"google-cloud-assured-workloads Documentation",
author,
"manual",
)
@@ -302,8 +302,8 @@
man_pages = [
(
master_doc,
- "google-cloud-assured-workflows",
- u"google-cloud-assured-workflows Documentation",
+ "google-cloud-assured-workloads",
+ u"google-cloud-assured-workloads Documentation",
[author],
1,
)
@@ -321,11 +321,11 @@
texinfo_documents = [
(
master_doc,
- "google-cloud-assured-workflows",
- u"google-cloud-assured-workflows Documentation",
+ "google-cloud-assured-workloads",
+ u"google-cloud-assured-workloads Documentation",
author,
- "google-cloud-assured-workflows",
- "google-cloud-assured-workflows Library",
+ "google-cloud-assured-workloads",
+ "google-cloud-assured-workloads Library",
"APIs",
)
]
diff --git a/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py b/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py
index d122754..ca738cc 100644
--- a/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py
+++ b/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py
@@ -88,7 +88,36 @@ class AssuredWorkloadsServiceAsyncClient:
AssuredWorkloadsServiceClient.parse_common_location_path
)
- from_service_account_file = AssuredWorkloadsServiceClient.from_service_account_file
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AssuredWorkloadsServiceAsyncClient: The constructed client.
+ """
+ return AssuredWorkloadsServiceClient.from_service_account_info.__func__(AssuredWorkloadsServiceAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AssuredWorkloadsServiceAsyncClient: The constructed client.
+ """
+ return AssuredWorkloadsServiceClient.from_service_account_file.__func__(AssuredWorkloadsServiceAsyncClient, filename, *args, **kwargs) # type: ignore
+
from_service_account_json = from_service_account_file
@property
@@ -166,16 +195,17 @@ async def create_workload(
r"""Creates Assured Workload.
Args:
- request (:class:`~.assuredworkloads_v1beta1.CreateWorkloadRequest`):
+ request (:class:`google.cloud.assuredworkloads_v1beta1.types.CreateWorkloadRequest`):
The request object. Request for creating a workload.
parent (:class:`str`):
Required. The resource name of the new Workload's
parent. Must be of the form
``organizations/{org_id}/locations/{location_id}``.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- workload (:class:`~.assuredworkloads_v1beta1.Workload`):
+ workload (:class:`google.cloud.assuredworkloads_v1beta1.types.Workload`):
Required. Assured Workload to create
This corresponds to the ``workload`` field
on the ``request`` instance; if ``request`` is provided, this
@@ -188,13 +218,11 @@ async def create_workload(
sent along with the request as metadata.
Returns:
- ~.operation_async.AsyncOperation:
+ google.api_core.operation_async.AsyncOperation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.assuredworkloads_v1beta1.Workload``: An
- Workload object for managing highly regulated workloads
- of cloud customers.
+ The result type for the operation will be :class:`google.cloud.assuredworkloads_v1beta1.types.Workload` An Workload object for managing highly regulated workloads of cloud
+ customers.
"""
# Create or coerce a protobuf request object.
@@ -261,19 +289,21 @@ async def update_workload(
workload can be in progress.
Args:
- request (:class:`~.assuredworkloads_v1beta1.UpdateWorkloadRequest`):
+ request (:class:`google.cloud.assuredworkloads_v1beta1.types.UpdateWorkloadRequest`):
The request object. Request for Updating a workload.
- workload (:class:`~.assuredworkloads_v1beta1.Workload`):
+ workload (:class:`google.cloud.assuredworkloads_v1beta1.types.Workload`):
Required. The workload to update. The workload’s
``name`` field is used to identify the workload to be
updated. Format:
organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
+
This corresponds to the ``workload`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- update_mask (:class:`~.field_mask.FieldMask`):
+ update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`):
Required. The list of fields to be
updated.
+
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -285,7 +315,7 @@ async def update_workload(
sent along with the request as metadata.
Returns:
- ~.assuredworkloads_v1beta1.Workload:
+ google.cloud.assuredworkloads_v1beta1.types.Workload:
An Workload object for managing
highly regulated workloads of cloud
customers.
@@ -347,12 +377,13 @@ async def delete_workload(
with a FAILED_PRECONDITION error.
Args:
- request (:class:`~.assuredworkloads_v1beta1.DeleteWorkloadRequest`):
+ request (:class:`google.cloud.assuredworkloads_v1beta1.types.DeleteWorkloadRequest`):
The request object. Request for deleting a Workload.
name (:class:`str`):
Required. The ``name`` field is used to identify the
workload. Format:
organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -390,6 +421,7 @@ async def delete_workload(
maximum=30.0,
multiplier=1.3,
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -418,7 +450,7 @@ async def get_workload(
r"""Gets Assured Workload associated with a CRM Node
Args:
- request (:class:`~.assuredworkloads_v1beta1.GetWorkloadRequest`):
+ request (:class:`google.cloud.assuredworkloads_v1beta1.types.GetWorkloadRequest`):
The request object. Request for fetching a workload.
name (:class:`str`):
Required. The resource name of the Workload to fetch.
@@ -427,6 +459,7 @@ async def get_workload(
"organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}".
For example,
"organizations/123/locations/us-east1/workloads/assured-workload-1".
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -438,7 +471,7 @@ async def get_workload(
sent along with the request as metadata.
Returns:
- ~.assuredworkloads_v1beta1.Workload:
+ google.cloud.assuredworkloads_v1beta1.types.Workload:
An Workload object for managing
highly regulated workloads of cloud
customers.
@@ -471,6 +504,7 @@ async def get_workload(
maximum=30.0,
multiplier=1.3,
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
@@ -500,13 +534,14 @@ async def list_workloads(
r"""Lists Assured Workloads under a CRM Node.
Args:
- request (:class:`~.assuredworkloads_v1beta1.ListWorkloadsRequest`):
+ request (:class:`google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest`):
The request object. Request for fetching workloads in an
organization.
parent (:class:`str`):
Required. Parent Resource to list workloads from. Must
be of the form
``organizations/{org_id}/locations/{location}``.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -518,7 +553,7 @@ async def list_workloads(
sent along with the request as metadata.
Returns:
- ~.pagers.ListWorkloadsAsyncPager:
+ google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.pagers.ListWorkloadsAsyncPager:
Response of ListWorkloads endpoint.
Iterating over this object will yield
results and resolve additional pages
@@ -552,6 +587,7 @@ async def list_workloads(
maximum=30.0,
multiplier=1.3,
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
diff --git a/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py b/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py
index de6ee16..bd9655e 100644
--- a/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py
+++ b/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py
@@ -118,6 +118,22 @@ def _get_default_mtls_endpoint(api_endpoint):
DEFAULT_ENDPOINT
)
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ AssuredWorkloadsServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
@@ -130,7 +146,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
kwargs: Additional arguments to pass to the constructor.
Returns:
- {@api.name}: The constructed client.
+ AssuredWorkloadsServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
@@ -238,10 +254,10 @@ def __init__(
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
- transport (Union[str, ~.AssuredWorkloadsServiceTransport]): The
+ transport (Union[str, AssuredWorkloadsServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (client_options_lib.ClientOptions): Custom options for the
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
@@ -277,21 +293,17 @@ def __init__(
util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
)
- ssl_credentials = None
+ client_cert_source_func = None
is_mtls = False
if use_client_cert:
if client_options.client_cert_source:
- import grpc # type: ignore
-
- cert, key = client_options.client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
is_mtls = True
+ client_cert_source_func = client_options.client_cert_source
else:
- creds = SslCredentials()
- is_mtls = creds.is_mtls
- ssl_credentials = creds.ssl_credentials if is_mtls else None
+ is_mtls = mtls.has_default_client_cert_source()
+ client_cert_source_func = (
+ mtls.default_client_cert_source() if is_mtls else None
+ )
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
@@ -334,7 +346,7 @@ def __init__(
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
- ssl_channel_credentials=ssl_credentials,
+ client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
@@ -352,16 +364,17 @@ def create_workload(
r"""Creates Assured Workload.
Args:
- request (:class:`~.assuredworkloads_v1beta1.CreateWorkloadRequest`):
+ request (google.cloud.assuredworkloads_v1beta1.types.CreateWorkloadRequest):
The request object. Request for creating a workload.
- parent (:class:`str`):
+ parent (str):
Required. The resource name of the new Workload's
parent. Must be of the form
``organizations/{org_id}/locations/{location_id}``.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- workload (:class:`~.assuredworkloads_v1beta1.Workload`):
+ workload (google.cloud.assuredworkloads_v1beta1.types.Workload):
Required. Assured Workload to create
This corresponds to the ``workload`` field
on the ``request`` instance; if ``request`` is provided, this
@@ -374,13 +387,11 @@ def create_workload(
sent along with the request as metadata.
Returns:
- ~.operation.Operation:
+ google.api_core.operation.Operation:
An object representing a long-running operation.
- The result type for the operation will be
- :class:``~.assuredworkloads_v1beta1.Workload``: An
- Workload object for managing highly regulated workloads
- of cloud customers.
+ The result type for the operation will be :class:`google.cloud.assuredworkloads_v1beta1.types.Workload` A Workload object for managing highly regulated workloads of cloud
+ customers.
"""
# Create or coerce a protobuf request object.
@@ -448,19 +459,21 @@ def update_workload(
workload can be in progress.
Args:
- request (:class:`~.assuredworkloads_v1beta1.UpdateWorkloadRequest`):
+ request (google.cloud.assuredworkloads_v1beta1.types.UpdateWorkloadRequest):
The request object. Request for Updating a workload.
- workload (:class:`~.assuredworkloads_v1beta1.Workload`):
+ workload (google.cloud.assuredworkloads_v1beta1.types.Workload):
Required. The workload to update. The workload’s
``name`` field is used to identify the workload to be
updated. Format:
organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
+
This corresponds to the ``workload`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
- update_mask (:class:`~.field_mask.FieldMask`):
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. The list of fields to be
updated.
+
This corresponds to the ``update_mask`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -472,7 +485,7 @@ def update_workload(
sent along with the request as metadata.
Returns:
- ~.assuredworkloads_v1beta1.Workload:
+ google.cloud.assuredworkloads_v1beta1.types.Workload:
An Workload object for managing
highly regulated workloads of cloud
customers.
@@ -535,12 +548,13 @@ def delete_workload(
with a FAILED_PRECONDITION error.
Args:
- request (:class:`~.assuredworkloads_v1beta1.DeleteWorkloadRequest`):
+ request (google.cloud.assuredworkloads_v1beta1.types.DeleteWorkloadRequest):
The request object. Request for deleting a Workload.
- name (:class:`str`):
+ name (str):
Required. The ``name`` field is used to identify the
workload. Format:
organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -601,15 +615,16 @@ def get_workload(
r"""Gets Assured Workload associated with a CRM Node
Args:
- request (:class:`~.assuredworkloads_v1beta1.GetWorkloadRequest`):
+ request (google.cloud.assuredworkloads_v1beta1.types.GetWorkloadRequest):
The request object. Request for fetching a workload.
- name (:class:`str`):
+ name (str):
Required. The resource name of the Workload to fetch.
This is the workloads's relative path in the API,
formatted as
"organizations/{organization_id}/locations/{location_id}/workloads/{workload_id}".
For example,
"organizations/123/locations/us-east1/workloads/assured-workload-1".
+
This corresponds to the ``name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -621,7 +636,7 @@ def get_workload(
sent along with the request as metadata.
Returns:
- ~.assuredworkloads_v1beta1.Workload:
+ google.cloud.assuredworkloads_v1beta1.types.Workload:
An Workload object for managing
highly regulated workloads of cloud
customers.
@@ -678,13 +693,14 @@ def list_workloads(
r"""Lists Assured Workloads under a CRM Node.
Args:
- request (:class:`~.assuredworkloads_v1beta1.ListWorkloadsRequest`):
+ request (google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest):
The request object. Request for fetching workloads in an
organization.
- parent (:class:`str`):
+ parent (str):
Required. Parent Resource to list workloads from. Must
be of the form
``organizations/{org_id}/locations/{location}``.
+
This corresponds to the ``parent`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
@@ -696,7 +712,7 @@ def list_workloads(
sent along with the request as metadata.
Returns:
- ~.pagers.ListWorkloadsPager:
+ google.cloud.assuredworkloads_v1beta1.services.assured_workloads_service.pagers.ListWorkloadsPager:
Response of ListWorkloads endpoint.
Iterating over this object will yield
results and resolve additional pages
diff --git a/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/pagers.py b/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/pagers.py
index c8418b9..5ff263b 100644
--- a/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/pagers.py
+++ b/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/pagers.py
@@ -15,7 +15,16 @@
# limitations under the License.
#
-from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
from google.cloud.assuredworkloads_v1beta1.types import assuredworkloads_v1beta1
@@ -24,7 +33,7 @@ class ListWorkloadsPager:
"""A pager for iterating through ``list_workloads`` requests.
This class thinly wraps an initial
- :class:`~.assuredworkloads_v1beta1.ListWorkloadsResponse` object, and
+ :class:`google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse` object, and
provides an ``__iter__`` method to iterate through its
``workloads`` field.
@@ -33,7 +42,7 @@ class ListWorkloadsPager:
through the ``workloads`` field on the
corresponding responses.
- All the usual :class:`~.assuredworkloads_v1beta1.ListWorkloadsResponse`
+ All the usual :class:`google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -51,9 +60,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.assuredworkloads_v1beta1.ListWorkloadsRequest`):
+ request (google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest):
The initial request object.
- response (:class:`~.assuredworkloads_v1beta1.ListWorkloadsResponse`):
+ response (google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
@@ -86,7 +95,7 @@ class ListWorkloadsAsyncPager:
"""A pager for iterating through ``list_workloads`` requests.
This class thinly wraps an initial
- :class:`~.assuredworkloads_v1beta1.ListWorkloadsResponse` object, and
+ :class:`google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse` object, and
provides an ``__aiter__`` method to iterate through its
``workloads`` field.
@@ -95,7 +104,7 @@ class ListWorkloadsAsyncPager:
through the ``workloads`` field on the
corresponding responses.
- All the usual :class:`~.assuredworkloads_v1beta1.ListWorkloadsResponse`
+ All the usual :class:`google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse`
attributes are available on the pager. If multiple requests are made, only
the most recent response is retained, and thus used for attribute lookup.
"""
@@ -115,9 +124,9 @@ def __init__(
Args:
method (Callable): The method that was originally called, and
which instantiated this pager.
- request (:class:`~.assuredworkloads_v1beta1.ListWorkloadsRequest`):
+ request (google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsRequest):
The initial request object.
- response (:class:`~.assuredworkloads_v1beta1.ListWorkloadsResponse`):
+ response (google.cloud.assuredworkloads_v1beta1.types.ListWorkloadsResponse):
The initial response object.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
diff --git a/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/base.py b/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/base.py
index 00acfaf..a9e8c02 100644
--- a/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/base.py
+++ b/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/base.py
@@ -72,10 +72,10 @@ def __init__(
scope (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
@@ -83,6 +83,9 @@ def __init__(
host += ":443"
self._host = host
+ # Save the scopes.
+ self._scopes = scopes or self.AUTH_SCOPES
+
# If no credentials are provided, then determine the appropriate
# defaults.
if credentials and credentials_file:
@@ -92,20 +95,17 @@ def __init__(
if credentials_file is not None:
credentials, _ = auth.load_credentials_from_file(
- credentials_file, scopes=scopes, quota_project_id=quota_project_id
+ credentials_file, scopes=self._scopes, quota_project_id=quota_project_id
)
elif credentials is None:
credentials, _ = auth.default(
- scopes=scopes, quota_project_id=quota_project_id
+ scopes=self._scopes, quota_project_id=quota_project_id
)
# Save the credentials.
self._credentials = credentials
- # Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages(client_info)
-
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
@@ -122,6 +122,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=30.0,
multiplier=1.3,
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -133,6 +134,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=30.0,
multiplier=1.3,
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
@@ -144,6 +146,7 @@ def _prep_wrapped_messages(self, client_info):
maximum=30.0,
multiplier=1.3,
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
+ deadline=60.0,
),
default_timeout=60.0,
client_info=client_info,
diff --git a/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc.py b/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc.py
index 8403d90..e3f4436 100644
--- a/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc.py
+++ b/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc.py
@@ -60,6 +60,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -90,6 +91,10 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -104,72 +109,61 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
else:
- ssl_credentials = SslCredentials().ssl_credentials
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- self._ssl_channel_credentials = ssl_credentials
- else:
- host = host if ":" in host else host + ":443"
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
- # create a new channel. The provided one is ignored.
+ if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
+ self._host,
+ credentials=self._credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
@@ -177,18 +171,8 @@ def __init__(
],
)
- self._stubs = {} # type: Dict[str, Callable]
- self._operations_client = None
-
- # Run the base constructor.
- super().__init__(
- host=host,
- credentials=credentials,
- credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- client_info=client_info,
- )
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
@@ -202,7 +186,7 @@ def create_channel(
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
- address (Optional[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
diff --git a/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc_asyncio.py b/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc_asyncio.py
index 5add0e1..30e1060 100644
--- a/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc_asyncio.py
+++ b/google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc_asyncio.py
@@ -64,7 +64,7 @@ def create_channel(
) -> aio.Channel:
"""Create and return a gRPC AsyncIO channel object.
Args:
- address (Optional[str]): The host for the channel to use.
+ host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
@@ -104,6 +104,7 @@ def __init__(
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -135,12 +136,16 @@ def __init__(
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -149,72 +154,61 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+ self._operations_client = None
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
- # Sanity check: Ensure that channel and credentials are not both
- # provided.
+ # Ignore credentials if a channel was passed.
credentials = False
-
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
- elif api_mtls_endpoint:
- warnings.warn(
- "api_mtls_endpoint and client_cert_source are deprecated",
- DeprecationWarning,
- )
- host = (
- api_mtls_endpoint
- if ":" in api_mtls_endpoint
- else api_mtls_endpoint + ":443"
- )
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
-
- # Create SSL credentials with client_cert_source or application
- # default SSL credentials.
- if client_cert_source:
- cert, key = client_cert_source()
- ssl_credentials = grpc.ssl_channel_credentials(
- certificate_chain=cert, private_key=key
- )
else:
- ssl_credentials = SslCredentials().ssl_credentials
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
- # create a new channel. The provided one is ignored.
- self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
- credentials_file=credentials_file,
- ssl_credentials=ssl_credentials,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
- self._ssl_channel_credentials = ssl_credentials
- else:
- host = host if ":" in host else host + ":443"
-
- if credentials is None:
- credentials, _ = auth.default(
- scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
- )
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ )
- # create a new channel. The provided one is ignored.
+ if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
- host,
- credentials=credentials,
+ self._host,
+ credentials=self._credentials,
credentials_file=credentials_file,
- ssl_credentials=ssl_channel_credentials,
- scopes=scopes or self.AUTH_SCOPES,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
@@ -222,18 +216,8 @@ def __init__(
],
)
- # Run the base constructor.
- super().__init__(
- host=host,
- credentials=credentials,
- credentials_file=credentials_file,
- scopes=scopes or self.AUTH_SCOPES,
- quota_project_id=quota_project_id,
- client_info=client_info,
- )
-
- self._stubs = {}
- self._operations_client = None
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
@property
def grpc_channel(self) -> aio.Channel:
diff --git a/google/cloud/assuredworkloads_v1beta1/types/__init__.py b/google/cloud/assuredworkloads_v1beta1/types/__init__.py
index 3ee899c..5adf85b 100644
--- a/google/cloud/assuredworkloads_v1beta1/types/__init__.py
+++ b/google/cloud/assuredworkloads_v1beta1/types/__init__.py
@@ -16,23 +16,23 @@
#
from .assuredworkloads_v1beta1 import (
+ CreateWorkloadOperationMetadata,
CreateWorkloadRequest,
- UpdateWorkloadRequest,
DeleteWorkloadRequest,
GetWorkloadRequest,
ListWorkloadsRequest,
ListWorkloadsResponse,
+ UpdateWorkloadRequest,
Workload,
- CreateWorkloadOperationMetadata,
)
__all__ = (
+ "CreateWorkloadOperationMetadata",
"CreateWorkloadRequest",
- "UpdateWorkloadRequest",
"DeleteWorkloadRequest",
"GetWorkloadRequest",
"ListWorkloadsRequest",
"ListWorkloadsResponse",
+ "UpdateWorkloadRequest",
"Workload",
- "CreateWorkloadOperationMetadata",
)
diff --git a/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_v1beta1.py b/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_v1beta1.py
index 608ceda..96b42e2 100644
--- a/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_v1beta1.py
+++ b/google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_v1beta1.py
@@ -46,7 +46,7 @@ class CreateWorkloadRequest(proto.Message):
Required. The resource name of the new Workload's parent.
Must be of the form
``organizations/{org_id}/locations/{location_id}``.
- workload (~.assuredworkloads_v1beta1.Workload):
+ workload (google.cloud.assuredworkloads_v1beta1.types.Workload):
Required. Assured Workload to create
external_id (str):
Optional. A identifier associated with the
@@ -68,12 +68,12 @@ class UpdateWorkloadRequest(proto.Message):
r"""Request for Updating a workload.
Attributes:
- workload (~.assuredworkloads_v1beta1.Workload):
+ workload (google.cloud.assuredworkloads_v1beta1.types.Workload):
Required. The workload to update. The workload’s ``name``
field is used to identify the workload to be updated.
Format:
organizations/{org_id}/locations/{location_id}/workloads/{workload_id}
- update_mask (~.field_mask.FieldMask):
+ update_mask (google.protobuf.field_mask_pb2.FieldMask):
Required. The list of fields to be updated.
"""
@@ -149,7 +149,7 @@ class ListWorkloadsResponse(proto.Message):
r"""Response of ListWorkloads endpoint.
Attributes:
- workloads (Sequence[~.assuredworkloads_v1beta1.Workload]):
+ workloads (Sequence[google.cloud.assuredworkloads_v1beta1.types.Workload]):
List of Workloads under a given parent.
next_page_token (str):
The next page token. Return empty if reached
@@ -183,16 +183,16 @@ class Workload(proto.Message):
hyphen, and spaces.
Example: My Workload
- resources (Sequence[~.assuredworkloads_v1beta1.Workload.ResourceInfo]):
+ resources (Sequence[google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceInfo]):
Output only. The resources associated with
this workload. These resources will be created
when creating the workload. If any of the
projects already exist, the workload creation
will fail. Always read only.
- compliance_regime (~.assuredworkloads_v1beta1.Workload.ComplianceRegime):
+ compliance_regime (google.cloud.assuredworkloads_v1beta1.types.Workload.ComplianceRegime):
Required. Immutable. Compliance Regime
associated with this workload.
- create_time (~.timestamp.Timestamp):
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
Output only. Immutable. The Workload creation
timestamp.
billing_account (str):
@@ -204,16 +204,16 @@ class Workload(proto.Message):
assigned billing account. The resource name has the form
``billingAccounts/{billing_account_id}``. For example,
``billingAccounts/012345-567890-ABCDEF``.
- il4_settings (~.assuredworkloads_v1beta1.Workload.IL4Settings):
+ il4_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.IL4Settings):
Required. Input only. Immutable. Settings
specific to resources needed for IL4.
- cjis_settings (~.assuredworkloads_v1beta1.Workload.CJISSettings):
+ cjis_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.CJISSettings):
Required. Input only. Immutable. Settings
specific to resources needed for CJIS.
- fedramp_high_settings (~.assuredworkloads_v1beta1.Workload.FedrampHighSettings):
+ fedramp_high_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.FedrampHighSettings):
Required. Input only. Immutable. Settings
specific to resources needed for FedRAMP High.
- fedramp_moderate_settings (~.assuredworkloads_v1beta1.Workload.FedrampModerateSettings):
+ fedramp_moderate_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.FedrampModerateSettings):
Required. Input only. Immutable. Settings
specific to resources needed for FedRAMP
Moderate.
@@ -222,7 +222,7 @@ class Workload(proto.Message):
calculated on the basis of the Workload
contents. It will be used in Update & Delete
operations.
- labels (Sequence[~.assuredworkloads_v1beta1.Workload.LabelsEntry]):
+ labels (Sequence[google.cloud.assuredworkloads_v1beta1.types.Workload.LabelsEntry]):
Optional. Labels applied to the workload.
provisioned_resources_parent (str):
Input only. The parent resource for the resources managed by
@@ -231,11 +231,17 @@ class Workload(proto.Message):
If not specified all resources are created under the
Workload parent. Formats: folders/{folder_id}
organizations/{organization_id}
- kms_settings (~.assuredworkloads_v1beta1.Workload.KMSSettings):
+ kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings):
Input only. Settings used to create a CMEK
crypto key. When set a project with a KMS CMEK
key is provisioned. This field is mandatory for
a subset of Compliance Regimes.
+ resource_settings (Sequence[google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceSettings]):
+ Input only. Resource properties that are used
+ to customize workload resources. These
+ properties (such as custom project id) will be
+ used to create workload resources if possible.
+ This field is optional.
"""
class ComplianceRegime(proto.Enum):
@@ -245,6 +251,9 @@ class ComplianceRegime(proto.Enum):
CJIS = 2
FEDRAMP_HIGH = 3
FEDRAMP_MODERATE = 4
+ US_REGIONAL_ACCESS = 5
+ HIPAA = 6
+ HITRUST = 7
class ResourceInfo(proto.Message):
r"""Represent the resources that are children of this Workload.
@@ -253,7 +262,7 @@ class ResourceInfo(proto.Message):
resource_id (int):
Resource identifier. For a project this represents
project_number.
- resource_type (~.assuredworkloads_v1beta1.Workload.ResourceInfo.ResourceType):
+ resource_type (google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceInfo.ResourceType):
Indicates the type of resource.
"""
@@ -273,12 +282,12 @@ class KMSSettings(proto.Message):
r"""Settings specific to the Key Management Service.
Attributes:
- next_rotation_time (~.timestamp.Timestamp):
+ next_rotation_time (google.protobuf.timestamp_pb2.Timestamp):
Required. Input only. Immutable. The time at
which the Key Management Service will
automatically create a new version of the crypto
key and mark it as the primary.
- rotation_period (~.duration.Duration):
+ rotation_period (google.protobuf.duration_pb2.Duration):
Required. Input only. Immutable. [next_rotation_time] will
be advanced by this period when the Key Management Service
automatically rotates a key. Must be at least 24 hours and
@@ -297,7 +306,7 @@ class IL4Settings(proto.Message):
r"""Settings specific to resources needed for IL4.
Attributes:
- kms_settings (~.assuredworkloads_v1beta1.Workload.KMSSettings):
+ kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings):
Required. Input only. Immutable. Settings
used to create a CMEK crypto key.
"""
@@ -310,7 +319,7 @@ class CJISSettings(proto.Message):
r"""Settings specific to resources needed for CJIS.
Attributes:
- kms_settings (~.assuredworkloads_v1beta1.Workload.KMSSettings):
+ kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings):
Required. Input only. Immutable. Settings
used to create a CMEK crypto key.
"""
@@ -323,7 +332,7 @@ class FedrampHighSettings(proto.Message):
r"""Settings specific to resources needed for FedRAMP High.
Attributes:
- kms_settings (~.assuredworkloads_v1beta1.Workload.KMSSettings):
+ kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings):
Required. Input only. Immutable. Settings
used to create a CMEK crypto key.
"""
@@ -336,7 +345,7 @@ class FedrampModerateSettings(proto.Message):
r"""Settings specific to resources needed for FedRAMP Moderate.
Attributes:
- kms_settings (~.assuredworkloads_v1beta1.Workload.KMSSettings):
+ kms_settings (google.cloud.assuredworkloads_v1beta1.types.Workload.KMSSettings):
Required. Input only. Immutable. Settings
used to create a CMEK crypto key.
"""
@@ -345,6 +354,27 @@ class FedrampModerateSettings(proto.Message):
proto.MESSAGE, number=1, message="Workload.KMSSettings",
)
+ class ResourceSettings(proto.Message):
+ r"""Represent the custom settings for the resources to be
+ created.
+
+ Attributes:
+ resource_id (str):
+ Resource identifier. For a project this represents
+ project_id. If the project is already taken, the workload
+ creation will fail.
+ resource_type (google.cloud.assuredworkloads_v1beta1.types.Workload.ResourceInfo.ResourceType):
+ Indicates the type of resource. This field should be
+ specified to correspond the id to the right project type
+ (CONSUMER_PROJECT or ENCRYPTION_KEYS_PROJECT)
+ """
+
+ resource_id = proto.Field(proto.STRING, number=1)
+
+ resource_type = proto.Field(
+ proto.ENUM, number=2, enum="Workload.ResourceInfo.ResourceType",
+ )
+
name = proto.Field(proto.STRING, number=1)
display_name = proto.Field(proto.STRING, number=2)
@@ -393,19 +423,23 @@ class FedrampModerateSettings(proto.Message):
kms_settings = proto.Field(proto.MESSAGE, number=14, message=KMSSettings,)
+ resource_settings = proto.RepeatedField(
+ proto.MESSAGE, number=15, message=ResourceSettings,
+ )
+
class CreateWorkloadOperationMetadata(proto.Message):
r"""Operation metadata to give request details of CreateWorkload.
Attributes:
- create_time (~.timestamp.Timestamp):
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
Optional. Time when the operation was
created.
display_name (str):
Optional. The display name of the workload.
parent (str):
Optional. The parent of the workload.
- compliance_regime (~.assuredworkloads_v1beta1.Workload.ComplianceRegime):
+ compliance_regime (google.cloud.assuredworkloads_v1beta1.types.Workload.ComplianceRegime):
Optional. Compliance controls that should be
applied to the resources managed by the
workload.
diff --git a/noxfile.py b/noxfile.py
index a57e24b..3d6f855 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -18,6 +18,7 @@
from __future__ import absolute_import
import os
+import pathlib
import shutil
import nox
@@ -30,6 +31,22 @@
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
+nox.options.sessions = [
+ "unit",
+ "system",
+ "cover",
+ "lint",
+ "lint_setup_py",
+ "blacken",
+ "docs",
+]
+
+# Error if a python version is missing
+nox.options.error_on_missing_interpreters = True
+
@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
@@ -70,19 +87,22 @@ def lint_setup_py(session):
def default(session):
# Install all test dependencies, then install this package in-place.
- session.install("asyncmock", "pytest-asyncio")
- session.install(
- "mock", "pytest", "pytest-cov",
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
)
- session.install("-e", ".")
+ session.install("asyncmock", "pytest-asyncio", "-c", constraints_path)
+
+ session.install("mock", "pytest", "pytest-cov", "-c", constraints_path)
+
+ session.install("-e", ".", "-c", constraints_path)
# Run py.test against the unit tests.
session.run(
"py.test",
"--quiet",
+ f"--junitxml=unit_{session.python}_sponge_log.xml",
"--cov=google/cloud",
- "--cov=tests/unit",
"--cov-append",
"--cov-config=.coveragerc",
"--cov-report=",
@@ -101,6 +121,9 @@ def unit(session):
@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS)
def system(session):
"""Run the system test suite."""
+ constraints_path = str(
+ CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt"
+ )
system_test_path = os.path.join("tests", "system.py")
system_test_folder_path = os.path.join("tests", "system")
@@ -110,6 +133,9 @@ def system(session):
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
+ # Install pyopenssl for mTLS testing.
+ if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
+ session.install("pyopenssl")
system_test_exists = os.path.exists(system_test_path)
system_test_folder_exists = os.path.exists(system_test_folder_path)
@@ -122,16 +148,26 @@ def system(session):
# Install all test dependencies, then install this package into the
# virtualenv's dist-packages.
- session.install(
- "mock", "pytest", "google-cloud-testutils",
- )
- session.install("-e", ".")
+ session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path)
+ session.install("-e", ".", "-c", constraints_path)
# Run py.test against the system tests.
if system_test_exists:
- session.run("py.test", "--quiet", system_test_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_path,
+ *session.posargs,
+ )
if system_test_folder_exists:
- session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
+ session.run(
+ "py.test",
+ "--quiet",
+ f"--junitxml=system_{session.python}_sponge_log.xml",
+ system_test_folder_path,
+ *session.posargs,
+ )
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -174,9 +210,7 @@ def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".")
- # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
- # https://github.com/docascode/sphinx-docfx-yaml/issues/97
- session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+ session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
diff --git a/renovate.json b/renovate.json
index 4fa9493..f08bc22 100644
--- a/renovate.json
+++ b/renovate.json
@@ -1,5 +1,6 @@
{
"extends": [
"config:base", ":preserveSemverRanges"
- ]
+ ],
+ "ignorePaths": [".pre-commit-config.yaml"]
}
diff --git a/setup.py b/setup.py
index c8678dc..7f03abe 100644
--- a/setup.py
+++ b/setup.py
@@ -41,7 +41,6 @@
include_package_data=True,
install_requires=(
"google-api-core[grpc] >= 1.22.2, < 2.0.0dev",
- "libcst >= 0.2.5",
"proto-plus >= 1.4.0",
),
python_requires=">=3.6",
diff --git a/synth.metadata b/synth.metadata
index 60612d0..976e74e 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -3,30 +3,30 @@
{
"git": {
"name": ".",
- "remote": "https://github.com/googleapis/python-assured-workloads.git",
- "sha": "2f833d21fcee8a5c8ab402d39a2e06aa6e34e24b"
+        "remote": "https://github.com/googleapis/python-assured-workloads.git",
+ "sha": "0bde7568a7c312c407902bd5815e4f0db46b7cfd"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "dd372aa22ded7a8ba6f0e03a80e06358a3fa0907",
- "internalRef": "347055288"
+ "sha": "56fc6d43fed71188d7e18f3ca003544646c4ab35",
+ "internalRef": "366346972"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4"
+ "sha": "ff39353f34a36e7643b86e97724e4027ab466dc6"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4"
+ "sha": "ff39353f34a36e7643b86e97724e4027ab466dc6"
}
}
],
@@ -40,91 +40,5 @@
"generator": "bazel"
}
}
- ],
- "generatedFiles": [
- ".flake8",
- ".github/CONTRIBUTING.md",
- ".github/ISSUE_TEMPLATE/bug_report.md",
- ".github/ISSUE_TEMPLATE/feature_request.md",
- ".github/ISSUE_TEMPLATE/support_request.md",
- ".github/PULL_REQUEST_TEMPLATE.md",
- ".github/release-please.yml",
- ".github/snippet-bot.yml",
- ".gitignore",
- ".kokoro/build.sh",
- ".kokoro/continuous/common.cfg",
- ".kokoro/continuous/continuous.cfg",
- ".kokoro/docker/docs/Dockerfile",
- ".kokoro/docker/docs/fetch_gpg_keys.sh",
- ".kokoro/docs/common.cfg",
- ".kokoro/docs/docs-presubmit.cfg",
- ".kokoro/docs/docs.cfg",
- ".kokoro/populate-secrets.sh",
- ".kokoro/presubmit/common.cfg",
- ".kokoro/presubmit/presubmit.cfg",
- ".kokoro/publish-docs.sh",
- ".kokoro/release.sh",
- ".kokoro/release/common.cfg",
- ".kokoro/release/release.cfg",
- ".kokoro/samples/lint/common.cfg",
- ".kokoro/samples/lint/continuous.cfg",
- ".kokoro/samples/lint/periodic.cfg",
- ".kokoro/samples/lint/presubmit.cfg",
- ".kokoro/samples/python3.6/common.cfg",
- ".kokoro/samples/python3.6/continuous.cfg",
- ".kokoro/samples/python3.6/periodic.cfg",
- ".kokoro/samples/python3.6/presubmit.cfg",
- ".kokoro/samples/python3.7/common.cfg",
- ".kokoro/samples/python3.7/continuous.cfg",
- ".kokoro/samples/python3.7/periodic.cfg",
- ".kokoro/samples/python3.7/presubmit.cfg",
- ".kokoro/samples/python3.8/common.cfg",
- ".kokoro/samples/python3.8/continuous.cfg",
- ".kokoro/samples/python3.8/periodic.cfg",
- ".kokoro/samples/python3.8/presubmit.cfg",
- ".kokoro/test-samples.sh",
- ".kokoro/trampoline.sh",
- ".kokoro/trampoline_v2.sh",
- ".pre-commit-config.yaml",
- ".trampolinerc",
- "CODE_OF_CONDUCT.md",
- "CONTRIBUTING.rst",
- "LICENSE",
- "MANIFEST.in",
- "docs/_static/custom.css",
- "docs/_templates/layout.html",
- "docs/assuredworkloads_v1beta1/services.rst",
- "docs/assuredworkloads_v1beta1/types.rst",
- "docs/conf.py",
- "docs/multiprocessing.rst",
- "google/cloud/assuredworkloads/__init__.py",
- "google/cloud/assuredworkloads/py.typed",
- "google/cloud/assuredworkloads_v1beta1/__init__.py",
- "google/cloud/assuredworkloads_v1beta1/py.typed",
- "google/cloud/assuredworkloads_v1beta1/services/__init__.py",
- "google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/__init__.py",
- "google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/async_client.py",
- "google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/client.py",
- "google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/pagers.py",
- "google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/__init__.py",
- "google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/base.py",
- "google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc.py",
- "google/cloud/assuredworkloads_v1beta1/services/assured_workloads_service/transports/grpc_asyncio.py",
- "google/cloud/assuredworkloads_v1beta1/types/__init__.py",
- "google/cloud/assuredworkloads_v1beta1/types/assuredworkloads_v1beta1.py",
- "mypy.ini",
- "noxfile.py",
- "renovate.json",
- "scripts/decrypt-secrets.sh",
- "scripts/readme-gen/readme_gen.py",
- "scripts/readme-gen/templates/README.tmpl.rst",
- "scripts/readme-gen/templates/auth.tmpl.rst",
- "scripts/readme-gen/templates/auth_api_key.tmpl.rst",
- "scripts/readme-gen/templates/install_deps.tmpl.rst",
- "scripts/readme-gen/templates/install_portaudio.tmpl.rst",
- "setup.cfg",
- "testing/.gitignore",
- "tests/unit/gapic/assuredworkloads_v1beta1/__init__.py",
- "tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py"
]
}
\ No newline at end of file
diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt
index c933887..a37a34a 100644
--- a/testing/constraints-3.6.txt
+++ b/testing/constraints-3.6.txt
@@ -6,5 +6,4 @@
# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
# Then this file should have foo==1.14.0
google-api-core==1.22.2
-libcst==0.2.5
-proto-plus==1.4.0
\ No newline at end of file
+proto-plus==1.4.0
diff --git a/tests/unit/gapic/assuredworkloads_v1beta1/__init__.py b/tests/unit/gapic/assuredworkloads_v1beta1/__init__.py
index 8b13789..42ffdf2 100644
--- a/tests/unit/gapic/assuredworkloads_v1beta1/__init__.py
+++ b/tests/unit/gapic/assuredworkloads_v1beta1/__init__.py
@@ -1 +1,16 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py b/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py
index 0998889..46f3435 100644
--- a/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py
+++ b/tests/unit/gapic/assuredworkloads_v1beta1/test_assured_workloads_service.py
@@ -101,7 +101,24 @@ def test__get_default_mtls_endpoint():
@pytest.mark.parametrize(
- "client_class", [AssuredWorkloadsServiceClient, AssuredWorkloadsServiceAsyncClient]
+ "client_class", [AssuredWorkloadsServiceClient, AssuredWorkloadsServiceAsyncClient,]
+)
+def test_assured_workloads_service_client_from_service_account_info(client_class):
+ creds = credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "assuredworkloads.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+ "client_class", [AssuredWorkloadsServiceClient, AssuredWorkloadsServiceAsyncClient,]
)
def test_assured_workloads_service_client_from_service_account_file(client_class):
creds = credentials.AnonymousCredentials()
@@ -111,16 +128,21 @@ def test_assured_workloads_service_client_from_service_account_file(client_class
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
assert client.transport._host == "assuredworkloads.googleapis.com:443"
def test_assured_workloads_service_client_get_transport_class():
transport = AssuredWorkloadsServiceClient.get_transport_class()
- assert transport == transports.AssuredWorkloadsServiceGrpcTransport
+ available_transports = [
+ transports.AssuredWorkloadsServiceGrpcTransport,
+ ]
+ assert transport in available_transports
transport = AssuredWorkloadsServiceClient.get_transport_class("grpc")
assert transport == transports.AssuredWorkloadsServiceGrpcTransport
@@ -175,7 +197,7 @@ def test_assured_workloads_service_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -191,7 +213,7 @@ def test_assured_workloads_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -207,7 +229,7 @@ def test_assured_workloads_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -235,7 +257,7 @@ def test_assured_workloads_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -296,29 +318,25 @@ def test_assured_workloads_service_client_mtls_env_auto(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- ssl_channel_creds = mock.Mock()
- with mock.patch(
- "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
- ):
- patched.return_value = None
- client = client_class(client_options=options)
+ patched.return_value = None
+ client = client_class(client_options=options)
- if use_client_cert_env == "false":
- expected_ssl_channel_creds = None
- expected_host = client.DEFAULT_ENDPOINT
- else:
- expected_ssl_channel_creds = ssl_channel_creds
- expected_host = client.DEFAULT_MTLS_ENDPOINT
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
@@ -327,66 +345,53 @@ def test_assured_workloads_service_client_mtls_env_auto(
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
):
with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.ssl_credentials",
- new_callable=mock.PropertyMock,
- ) as ssl_credentials_mock:
- if use_client_cert_env == "false":
- is_mtls_mock.return_value = False
- ssl_credentials_mock.return_value = None
- expected_host = client.DEFAULT_ENDPOINT
- expected_ssl_channel_creds = None
- else:
- is_mtls_mock.return_value = True
- ssl_credentials_mock.return_value = mock.Mock()
- expected_host = client.DEFAULT_MTLS_ENDPOINT
- expected_ssl_channel_creds = (
- ssl_credentials_mock.return_value
- )
-
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=expected_host,
- scopes=None,
- ssl_channel_credentials=expected_ssl_channel_creds,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
- # Check the case client_cert_source and ADC client cert are not provided.
- with mock.patch.dict(
- os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
- ):
- with mock.patch.object(transport_class, "__init__") as patched:
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
- ):
- with mock.patch(
- "google.auth.transport.grpc.SslCredentials.is_mtls",
- new_callable=mock.PropertyMock,
- ) as is_mtls_mock:
- is_mtls_mock.return_value = False
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
+ host=expected_host,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
@@ -416,7 +421,7 @@ def test_assured_workloads_service_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -450,7 +455,7 @@ def test_assured_workloads_service_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -469,7 +474,7 @@ def test_assured_workloads_service_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- ssl_channel_credentials=None,
+ client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -507,6 +512,22 @@ def test_create_workload_from_dict():
test_create_workload(request_type=dict)
+def test_create_workload_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = AssuredWorkloadsServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_workload), "__call__") as call:
+ client.create_workload()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == assuredworkloads_v1beta1.CreateWorkloadRequest()
+
+
@pytest.mark.asyncio
async def test_create_workload_async(
transport: str = "grpc_asyncio",
@@ -748,6 +769,22 @@ def test_update_workload_from_dict():
test_update_workload(request_type=dict)
+def test_update_workload_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = AssuredWorkloadsServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.update_workload), "__call__") as call:
+ client.update_workload()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == assuredworkloads_v1beta1.UpdateWorkloadRequest()
+
+
@pytest.mark.asyncio
async def test_update_workload_async(
transport: str = "grpc_asyncio",
@@ -987,6 +1024,22 @@ def test_delete_workload_from_dict():
test_delete_workload(request_type=dict)
+def test_delete_workload_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = AssuredWorkloadsServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_workload), "__call__") as call:
+ client.delete_workload()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == assuredworkloads_v1beta1.DeleteWorkloadRequest()
+
+
@pytest.mark.asyncio
async def test_delete_workload_async(
transport: str = "grpc_asyncio",
@@ -1208,6 +1261,22 @@ def test_get_workload_from_dict():
test_get_workload(request_type=dict)
+def test_get_workload_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = AssuredWorkloadsServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_workload), "__call__") as call:
+ client.get_workload()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == assuredworkloads_v1beta1.GetWorkloadRequest()
+
+
@pytest.mark.asyncio
async def test_get_workload_async(
transport: str = "grpc_asyncio",
@@ -1434,6 +1503,22 @@ def test_list_workloads_from_dict():
test_list_workloads(request_type=dict)
+def test_list_workloads_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = AssuredWorkloadsServiceClient(
+ credentials=credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_workloads), "__call__") as call:
+ client.list_workloads()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == assuredworkloads_v1beta1.ListWorkloadsRequest()
+
+
@pytest.mark.asyncio
async def test_list_workloads_async(
transport: str = "grpc_asyncio",
@@ -1943,6 +2028,53 @@ def test_assured_workloads_service_transport_auth_adc():
)
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.AssuredWorkloadsServiceGrpcTransport,
+ transports.AssuredWorkloadsServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_assured_workloads_service_grpc_transport_client_cert_source_for_mtls(
+ transport_class,
+):
+ cred = credentials.AnonymousCredentials()
+
+ # Check ssl_channel_credentials is used if provided.
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+ mock_ssl_channel_creds = mock.Mock()
+ transport_class(
+ host="squid.clam.whelk",
+ credentials=cred,
+ ssl_channel_credentials=mock_ssl_channel_creds,
+ )
+ mock_create_channel.assert_called_once_with(
+ "squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_channel_creds,
+ quota_project_id=None,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+ # is used.
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+ with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+ transport_class(
+ credentials=cred,
+ client_cert_source_for_mtls=client_cert_source_callback,
+ )
+ expected_cert, expected_key = client_cert_source_callback()
+ mock_ssl_cred.assert_called_once_with(
+ certificate_chain=expected_cert, private_key=expected_key
+ )
+
+
def test_assured_workloads_service_host_no_port():
client = AssuredWorkloadsServiceClient(
credentials=credentials.AnonymousCredentials(),
@@ -1964,7 +2096,7 @@ def test_assured_workloads_service_host_with_port():
def test_assured_workloads_service_grpc_transport_channel():
- channel = grpc.insecure_channel("http://localhost/")
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.AssuredWorkloadsServiceGrpcTransport(
@@ -1976,7 +2108,7 @@ def test_assured_workloads_service_grpc_transport_channel():
def test_assured_workloads_service_grpc_asyncio_transport_channel():
- channel = aio.insecure_channel("http://localhost/")
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.AssuredWorkloadsServiceGrpcAsyncIOTransport(
@@ -1987,6 +2119,8 @@ def test_assured_workloads_service_grpc_asyncio_transport_channel():
assert transport._ssl_channel_credentials == None
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -2001,7 +2135,7 @@ def test_assured_workloads_service_transport_channel_mtls_with_client_cert_sourc
"grpc.ssl_channel_credentials", autospec=True
) as grpc_ssl_channel_cred:
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
@@ -2039,6 +2173,8 @@ def test_assured_workloads_service_transport_channel_mtls_with_client_cert_sourc
assert transport._ssl_channel_credentials == mock_ssl_cred
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[
@@ -2054,7 +2190,7 @@ def test_assured_workloads_service_transport_channel_mtls_with_adc(transport_cla
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(
- transport_class, "create_channel", autospec=True
+ transport_class, "create_channel"
) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel