diff --git a/.coveragerc b/.coveragerc index 6abe3a19..f7930cf8 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,27 +1,11 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! [run] branch = True [report] fail_under = 100 show_missing = True -omit = google/cloud/iot/__init__.py +omit = + google/cloud/iot/__init__.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER @@ -31,4 +15,4 @@ exclude_lines = # This is added at the module level as a safeguard for if someone # generates the code and tries to run it without pip installing. This # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound \ No newline at end of file + except pkg_resources.DistributionNotFound diff --git a/.flake8 b/.flake8 index ed931638..29227d4c 100644 --- a/.flake8 +++ b/.flake8 @@ -26,6 +26,7 @@ exclude = *_pb2.py # Standard linting exemptions. 
+ **/.nox/** __pycache__, .git, *.pyc, diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml new file mode 100644 index 00000000..fc281c05 --- /dev/null +++ b/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/.gitignore b/.gitignore index b9daa52f..b4243ced 100644 --- a/.gitignore +++ b/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 1ecff726..299ae80e 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-iot +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-iot" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version + +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index 3baaac83..8d2b6443 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -30,7 +30,7 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - value: "docs-staging-v2-staging" + value: "docs-staging-v2" } # It will upload the docker image after successful builds. diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg index 11181078..7211cbd1 100644 --- a/.kokoro/docs/docs-presubmit.cfg +++ b/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-iot/.kokoro/build.sh" +} + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh new file mode 100755 index 00000000..f5251425 --- /dev/null +++ b/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? == 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index fb646a77..da091068 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,42 +23,18 @@ env_vars: { value: "github/python-iot/.kokoro/release.sh" } -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: 
"google_cloud_pypi_password" + } + } } -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } -} +# Tokens needed to report release status back to GitHub +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg index 48bcae3a..1d2fcd9c 100644 --- a/.kokoro/samples/python3.6/common.cfg +++ b/.kokoro/samples/python3.6/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.6" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py36" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-iot/.kokoro/test-samples.sh" diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 00000000..f9cfcd33 --- /dev/null +++ b/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg index c32738b9..b9a0e192 100644 --- a/.kokoro/samples/python3.7/common.cfg +++ b/.kokoro/samples/python3.7/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.7" } +# Declare build specific Cloud project. 
+env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py37" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-iot/.kokoro/test-samples.sh" diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 00000000..f9cfcd33 --- /dev/null +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg index 29f96249..42080967 100644 --- a/.kokoro/samples/python3.8/common.cfg +++ b/.kokoro/samples/python3.8/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.8" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py38" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-iot/.kokoro/test-samples.sh" diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 00000000..f9cfcd33 --- /dev/null +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh new file mode 100755 index 00000000..9c281f84 --- /dev/null +++ b/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance 
with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A customized test runner for samples. +# +# For periodic builds, you can specify this file for testing against head. + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-iot + +exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh new file mode 100755 index 00000000..cf5de74c --- /dev/null +++ b/.kokoro/test-samples-impl.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +# Exit early if samples directory doesn't exist +if [ ! 
-d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets acessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
+ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index 331b2579..1f9137a8 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# The default test runner for samples. +# +# For periodic builds, we rewinds the repo to the latest release, and +# run test-samples-impl.sh. # `-e` enables the script to automatically fail when a command fails # `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero @@ -24,81 +28,19 @@ cd github/python-iot # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + # preserving the test runner implementation. + cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + echo "Now we rewind the repo back to the latest release..." LATEST_RELEASE=$(git describe --abbrev=0 --tags) git checkout $LATEST_RELEASE -fi - -# Disable buffering, so that the logs stream through. 
-export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.6 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. - file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the Build Cop Bot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. 
- if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop - $KOKORO_GFILE_DIR/linux_amd64/buildcop + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" \ No newline at end of file +exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index e8c4251f..f39236e9 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -15,9 +15,14 @@ set -eo pipefail -python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$? +# Always run the cleanup script, regardless of the success of bouncing into +# the container. +function cleanup() { + chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + echo "cleanup"; +} +trap cleanup EXIT -chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh -${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true - -exit ${ret_code} +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. 
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/.kokoro/trampoline_v2.sh b/.kokoro/trampoline_v2.sh index 719bcd5b..4af6cdc2 100755 --- a/.kokoro/trampoline_v2.sh +++ b/.kokoro/trampoline_v2.sh @@ -159,7 +159,7 @@ if [[ -n "${KOKORO_BUILD_ID:-}" ]]; then "KOKORO_GITHUB_COMMIT" "KOKORO_GITHUB_PULL_REQUEST_NUMBER" "KOKORO_GITHUB_PULL_REQUEST_COMMIT" - # For Build Cop Bot + # For FlakyBot "KOKORO_GITHUB_COMMIT_URL" "KOKORO_GITHUB_PULL_REQUEST_URL" ) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..32302e48 --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,17 @@ +# See https://pre-commit.com for more information +# See https://pre-commit.com/hooks.html for more hooks +repos: +- repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.4.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml +- repo: https://github.com/psf/black + rev: 19.10b0 + hooks: + - id: black +- repo: https://gitlab.com/pycqa/flake8 + rev: 3.9.0 + hooks: + - id: flake8 diff --git a/.trampolinerc b/.trampolinerc index 995ee291..383b6ec8 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -24,6 +24,7 @@ required_envvars+=( pass_down_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Prevent unintentional override on the default image. diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index b3d1f602..039f4368 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,44 +1,95 @@ -# Contributor Code of Conduct +# Code of Conduct -As contributors and maintainers of this project, -and in the interest of fostering an open and welcoming community, -we pledge to respect all people who contribute through reporting issues, -posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. 
+## Our Pledge -We are committed to making participation in this project -a harassment-free experience for everyone, -regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, -body size, race, ethnicity, age, religion, or nationality. +In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. 
+* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. 
+ +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. + +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. 
+We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 192450f6..fa7922c3 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -21,8 +21,8 @@ In order to add a feature: - The feature must be documented in both the API and narrative documentation. -- The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. +- The feature must work fully on the following CPython versions: + 3.6, 3.7, 3.8 and 3.9 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: $ nox -s unit-2.7 - $ nox -s unit-3.7 + $ nox -s unit-3.8 $ ... +- Args to pytest can be passed through the nox command separated by a `--`. For + example, to run a single test:: + + $ nox -s unit-3.8 -- -k + .. note:: The unit tests and system tests are described in the @@ -80,25 +85,6 @@ We use `nox `__ to instrument our tests. .. 
nox: https://pypi.org/project/nox/ -Note on Editable Installs / Develop Mode -======================================== - -- As mentioned previously, using ``setuptools`` in `develop mode`_ - or a ``pip`` `editable install`_ is not possible with this - library. This is because this library uses `namespace packages`_. - For context see `Issue #2316`_ and the relevant `PyPA issue`_. - - Since ``editable`` / ``develop`` mode can't be used, packages - need to be installed directly. Hence your changes to the source - tree don't get incorporated into the **already installed** - package. - -.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ -.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 -.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 -.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode -.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs - ***************************************** I'm getting weird errors... Can you help? ***************************************** @@ -112,8 +98,12 @@ On Debian/Ubuntu:: ************ Coding Style ************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: -- PEP8 compliance, with exceptions defined in the linter configuration. + $ nox -s blacken + +- PEP8 compliance is required, with exceptions defined in the linter configuration. If you have ``nox`` installed, you can test that you have not introduced any non-compliant code via:: @@ -130,6 +120,16 @@ Coding Style should point to the official ``googleapis`` checkout and the the branch should be the main branch on that remote (``master``). +- This repository contains configuration for the + `pre-commit `__ tool, which automates checking + our linters during a commit. 
If you have it installed on your ``$PATH``, + you can enable enforcing those checks via: + +.. code-block:: bash + + $ pre-commit install + pre-commit installed at .git/hooks/pre-commit + Exceptions to PEP8: - Many unit tests use a helper method, ``_call_fut`` ("FUT" is short for @@ -142,13 +142,18 @@ Running System Tests - To run system tests, you can execute:: - $ nox -s system-3.7 + # Run all system tests + $ nox -s system-3.8 $ nox -s system-2.7 + # Run a single system test + $ nox -s system-3.8 -- -k + + .. note:: System tests are only configured to run under Python 2.7 and - Python 3.7. For expediency, we do not run them in older versions + Python 3.8. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local @@ -211,25 +216,24 @@ Supported Python Versions We support: -- `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ - `Python 3.8`_ +- `Python 3.9`_ -.. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ .. _Python 3.8: https://docs.python.org/3.8/ +.. _Python 3.9: https://docs.python.org/3.9/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-iot/blob/master/noxfile.py -Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version -3.5. Reasons for this include: +3.6. 
Reasons for this include: - Encouraging use of newest versions of Python 3 - Taking the lead of `prominent`_ open-source `projects`_ diff --git a/LICENSE b/LICENSE index a8ee855d..d6456956 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,7 @@ - Apache License + + Apache License Version 2.0, January 2004 - https://www.apache.org/licenses/ + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +193,7 @@ you may not use this file except in compliance with the License. You may obtain a copy of the License at - https://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/MANIFEST.in b/MANIFEST.in index e9e29d12..e783f4c6 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -16,10 +16,10 @@ # Generated by synthtool. DO NOT EDIT! include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ # Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file +prune scripts/readme-gen diff --git a/UPGRADING.md b/UPGRADING.md index 32c8329d..7815d5f2 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -17,10 +17,10 @@ The 2.0.0 release requires Python 3.6+. Methods expect request objects. We provide a script that will convert most common use cases. -* Install the library +* Install the library with `libcst`. ```py -python3 -m pip install google-cloud-iot +python3 -m pip install google-cloud-iot[libcst] ``` * The script `fixup_iot_v1_keywords.py` is shipped with the library. 
It expects @@ -154,4 +154,4 @@ project = 'my-project' location = 'location' location_path = f'projects/{project}/locations/{location}' -``` \ No newline at end of file +``` diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 0abaf229..bcd37bbd 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,4 +1,9 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} diff --git a/docs/iot_v1/device_manager.rst b/docs/iot_v1/device_manager.rst new file mode 100644 index 00000000..647b9762 --- /dev/null +++ b/docs/iot_v1/device_manager.rst @@ -0,0 +1,11 @@ +DeviceManager +------------------------------- + +.. automodule:: google.cloud.iot_v1.services.device_manager + :members: + :inherited-members: + + +.. automodule:: google.cloud.iot_v1.services.device_manager.pagers + :members: + :inherited-members: diff --git a/docs/iot_v1/services.rst b/docs/iot_v1/services.rst index c077b659..64ed529c 100644 --- a/docs/iot_v1/services.rst +++ b/docs/iot_v1/services.rst @@ -1,6 +1,6 @@ Services for Google Cloud Iot v1 API ==================================== +.. toctree:: + :maxdepth: 2 -.. automodule:: google.cloud.iot_v1.services.device_manager - :members: - :inherited-members: + device_manager diff --git a/docs/iot_v1/types.rst b/docs/iot_v1/types.rst index f4ffca48..05bd7958 100644 --- a/docs/iot_v1/types.rst +++ b/docs/iot_v1/types.rst @@ -3,3 +3,5 @@ Types for Google Cloud Iot v1 API .. automodule:: google.cloud.iot_v1.types :members: + :undoc-members: + :show-inheritance: diff --git a/google/cloud/iot_v1/proto/device_manager.proto b/google/cloud/iot_v1/proto/device_manager.proto index 44d11afe..dfd6c9b6 100644 --- a/google/cloud/iot_v1/proto/device_manager.proto +++ b/google/cloud/iot_v1/proto/device_manager.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. 
+// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. -// syntax = "proto3"; @@ -392,7 +391,8 @@ message GetDeviceRequest { ]; // The fields of the `Device` resource to be returned in the response. If the - // field mask is unset or empty, all fields are returned. + // field mask is unset or empty, all fields are returned. Fields have to be + // provided in snake_case format, for example: `last_heartbeat_time`. google.protobuf.FieldMask field_mask = 2; } @@ -445,7 +445,8 @@ message ListDevicesRequest { // The fields of the `Device` resource to be returned in the response. The // fields `id` and `num_id` are always returned, along with any - // other fields specified. + // other fields specified in snake_case format, for example: + // `last_heartbeat_time`. google.protobuf.FieldMask field_mask = 4; // Options related to gateways. diff --git a/google/cloud/iot_v1/proto/resources.proto b/google/cloud/iot_v1/proto/resources.proto index 2201d330..a14fc027 100644 --- a/google/cloud/iot_v1/proto/resources.proto +++ b/google/cloud/iot_v1/proto/resources.proto @@ -1,4 +1,4 @@ -// Copyright 2019 Google LLC. +// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. @@ -11,7 +11,6 @@ // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
-// syntax = "proto3"; diff --git a/google/cloud/iot_v1/services/device_manager/async_client.py b/google/cloud/iot_v1/services/device_manager/async_client.py index 2b1bbd31..17923675 100644 --- a/google/cloud/iot_v1/services/device_manager/async_client.py +++ b/google/cloud/iot_v1/services/device_manager/async_client.py @@ -52,13 +52,81 @@ class DeviceManagerAsyncClient: DEFAULT_ENDPOINT = DeviceManagerClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = DeviceManagerClient.DEFAULT_MTLS_ENDPOINT + device_path = staticmethod(DeviceManagerClient.device_path) + parse_device_path = staticmethod(DeviceManagerClient.parse_device_path) registry_path = staticmethod(DeviceManagerClient.registry_path) + parse_registry_path = staticmethod(DeviceManagerClient.parse_registry_path) - device_path = staticmethod(DeviceManagerClient.device_path) + common_billing_account_path = staticmethod( + DeviceManagerClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DeviceManagerClient.parse_common_billing_account_path + ) + + common_folder_path = staticmethod(DeviceManagerClient.common_folder_path) + parse_common_folder_path = staticmethod( + DeviceManagerClient.parse_common_folder_path + ) + + common_organization_path = staticmethod( + DeviceManagerClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DeviceManagerClient.parse_common_organization_path + ) + + common_project_path = staticmethod(DeviceManagerClient.common_project_path) + parse_common_project_path = staticmethod( + DeviceManagerClient.parse_common_project_path + ) + + common_location_path = staticmethod(DeviceManagerClient.common_location_path) + parse_common_location_path = staticmethod( + DeviceManagerClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. 
+ + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DeviceManagerAsyncClient: The constructed client. + """ + return DeviceManagerClient.from_service_account_info.__func__(DeviceManagerAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DeviceManagerAsyncClient: The constructed client. + """ + return DeviceManagerClient.from_service_account_file.__func__(DeviceManagerAsyncClient, filename, *args, **kwargs) # type: ignore - from_service_account_file = DeviceManagerClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> DeviceManagerTransport: + """Return the transport used by the client instance. + + Returns: + DeviceManagerTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(DeviceManagerClient).get_transport_class, type(DeviceManagerClient) ) @@ -85,16 +153,19 @@ def __init__( client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -121,20 +192,22 @@ async def create_device_registry( r"""Creates a device registry that contains devices. Args: - request (:class:`~.device_manager.CreateDeviceRegistryRequest`): + request (:class:`google.cloud.iot_v1.types.CreateDeviceRegistryRequest`): The request object. Request for `CreateDeviceRegistry`. parent (:class:`str`): Required. The project and cloud region where this device registry must be created. For example, ``projects/example-project/locations/us-central1``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
- device_registry (:class:`~.resources.DeviceRegistry`): + device_registry (:class:`google.cloud.iot_v1.types.DeviceRegistry`): Required. The device registry. The field ``name`` must be empty. The server will generate that field from the device registry ``id`` provided and the ``parent`` field. + This corresponds to the ``device_registry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -146,13 +219,14 @@ async def create_device_registry( sent along with the request as metadata. Returns: - ~.resources.DeviceRegistry: + google.cloud.iot_v1.types.DeviceRegistry: A container for a group of devices. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, device_registry]): + has_flattened_params = any([parent, device_registry]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -200,11 +274,12 @@ async def get_device_registry( r"""Gets a device registry configuration. Args: - request (:class:`~.device_manager.GetDeviceRegistryRequest`): + request (:class:`google.cloud.iot_v1.types.GetDeviceRegistryRequest`): The request object. Request for `GetDeviceRegistry`. name (:class:`str`): Required. The name of the device registry. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -216,13 +291,14 @@ async def get_device_registry( sent along with the request as metadata. Returns: - ~.resources.DeviceRegistry: + google.cloud.iot_v1.types.DeviceRegistry: A container for a group of devices. """ # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -245,8 +321,9 @@ async def get_device_registry( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, @@ -277,23 +354,25 @@ async def update_device_registry( r"""Updates a device registry configuration. Args: - request (:class:`~.device_manager.UpdateDeviceRegistryRequest`): + request (:class:`google.cloud.iot_v1.types.UpdateDeviceRegistryRequest`): The request object. Request for `UpdateDeviceRegistry`. - device_registry (:class:`~.resources.DeviceRegistry`): + device_registry (:class:`google.cloud.iot_v1.types.DeviceRegistry`): Required. The new values for the device registry. The ``id`` field must be empty, and the ``name`` field must indicate the path of the resource. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. + This corresponds to the ``device_registry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Only updates the ``device_registry`` fields indicated by this mask. The field mask must not be empty, and it must not contain fields that are immutable or only set by the server. Mutable top-level fields: ``event_notification_config``, ``http_config``, ``mqtt_config``, and ``state_notification_config``. 
+ This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -305,13 +384,14 @@ async def update_device_registry( sent along with the request as metadata. Returns: - ~.resources.DeviceRegistry: + google.cloud.iot_v1.types.DeviceRegistry: A container for a group of devices. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([device_registry, update_mask]): + has_flattened_params = any([device_registry, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -361,11 +441,12 @@ async def delete_device_registry( r"""Deletes a device registry configuration. Args: - request (:class:`~.device_manager.DeleteDeviceRegistryRequest`): + request (:class:`google.cloud.iot_v1.types.DeleteDeviceRegistryRequest`): The request object. Request for `DeleteDeviceRegistry`. name (:class:`str`): Required. The name of the device registry. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -379,7 +460,8 @@ async def delete_device_registry( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
@@ -402,8 +484,9 @@ async def delete_device_registry( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, @@ -432,12 +515,13 @@ async def list_device_registries( r"""Lists device registries. Args: - request (:class:`~.device_manager.ListDeviceRegistriesRequest`): + request (:class:`google.cloud.iot_v1.types.ListDeviceRegistriesRequest`): The request object. Request for `ListDeviceRegistries`. parent (:class:`str`): Required. The project and cloud region path. For example, ``projects/example-project/locations/us-central1``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -449,8 +533,8 @@ async def list_device_registries( sent along with the request as metadata. Returns: - ~.pagers.ListDeviceRegistriesAsyncPager: - Response for ``ListDeviceRegistries``. + google.cloud.iot_v1.services.device_manager.pagers.ListDeviceRegistriesAsyncPager: + Response for ListDeviceRegistries. Iterating over this object will yield results and resolve additional pages automatically. @@ -459,7 +543,8 @@ async def list_device_registries( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
@@ -482,8 +567,9 @@ async def list_device_registries( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, @@ -520,20 +606,22 @@ async def create_device( r"""Creates a device in a device registry. Args: - request (:class:`~.device_manager.CreateDeviceRequest`): + request (:class:`google.cloud.iot_v1.types.CreateDeviceRequest`): The request object. Request for `CreateDevice`. parent (:class:`str`): Required. The name of the device registry where this device should be created. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - device (:class:`~.resources.Device`): + device (:class:`google.cloud.iot_v1.types.Device`): Required. The device registration details. The field ``name`` must be empty. The server generates ``name`` from the device registry ``id`` and the ``parent`` field. + This corresponds to the ``device`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -545,13 +633,14 @@ async def create_device( sent along with the request as metadata. Returns: - ~.resources.Device: + google.cloud.iot_v1.types.Device: The device resource. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, device]): + has_flattened_params = any([parent, device]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -599,13 +688,14 @@ async def get_device( r"""Gets details about a device. 
Args: - request (:class:`~.device_manager.GetDeviceRequest`): + request (:class:`google.cloud.iot_v1.types.GetDeviceRequest`): The request object. Request for `GetDevice`. name (:class:`str`): Required. The name of the device. For example, ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -617,13 +707,14 @@ async def get_device( sent along with the request as metadata. Returns: - ~.resources.Device: + google.cloud.iot_v1.types.Device: The device resource. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -646,8 +737,9 @@ async def get_device( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, @@ -678,23 +770,25 @@ async def update_device( r"""Updates a device. Args: - request (:class:`~.device_manager.UpdateDeviceRequest`): + request (:class:`google.cloud.iot_v1.types.UpdateDeviceRequest`): The request object. Request for `UpdateDevice`. - device (:class:`~.resources.Device`): + device (:class:`google.cloud.iot_v1.types.Device`): Required. The new values for the device. The ``id`` and ``num_id`` fields must be empty, and the field ``name`` must specify the name path. 
For example, ``projects/p0/locations/us-central1/registries/registry0/devices/device0``\ or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + This corresponds to the ``device`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Required. Only updates the ``device`` fields indicated by this mask. The field mask must not be empty, and it must not contain fields that are immutable or only set by the server. Mutable top-level fields: ``credentials``, ``blocked``, and ``metadata`` + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -706,13 +800,14 @@ async def update_device( sent along with the request as metadata. Returns: - ~.resources.Device: + google.cloud.iot_v1.types.Device: The device resource. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([device, update_mask]): + has_flattened_params = any([device, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -762,13 +857,14 @@ async def delete_device( r"""Deletes a device. Args: - request (:class:`~.device_manager.DeleteDeviceRequest`): + request (:class:`google.cloud.iot_v1.types.DeleteDeviceRequest`): The request object. Request for `DeleteDevice`. name (:class:`str`): Required. The name of the device. For example, ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. 
+ This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -782,7 +878,8 @@ async def delete_device( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -805,8 +902,9 @@ async def delete_device( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, @@ -835,12 +933,13 @@ async def list_devices( r"""List devices in a device registry. Args: - request (:class:`~.device_manager.ListDevicesRequest`): + request (:class:`google.cloud.iot_v1.types.ListDevicesRequest`): The request object. Request for `ListDevices`. parent (:class:`str`): Required. The device registry path. Required. For example, ``projects/my-project/locations/us-central1/registries/my-registry``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -852,8 +951,8 @@ async def list_devices( sent along with the request as metadata. Returns: - ~.pagers.ListDevicesAsyncPager: - Response for ``ListDevices``. + google.cloud.iot_v1.services.device_manager.pagers.ListDevicesAsyncPager: + Response for ListDevices. Iterating over this object will yield results and resolve additional pages automatically. @@ -862,7 +961,8 @@ async def list_devices( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -885,8 +985,9 @@ async def list_devices( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, @@ -925,7 +1026,7 @@ async def modify_cloud_to_device_config( the modified configuration version and its metadata. Args: - request (:class:`~.device_manager.ModifyCloudToDeviceConfigRequest`): + request (:class:`google.cloud.iot_v1.types.ModifyCloudToDeviceConfigRequest`): The request object. Request for `ModifyCloudToDeviceConfig`. name (:class:`str`): @@ -933,12 +1034,14 @@ async def modify_cloud_to_device_config( ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. binary_data (:class:`bytes`): Required. The configuration data for the device. + This corresponds to the ``binary_data`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -950,7 +1053,7 @@ async def modify_cloud_to_device_config( sent along with the request as metadata. Returns: - ~.resources.DeviceConfig: + google.cloud.iot_v1.types.DeviceConfig: The device configuration. Eventually delivered to devices. @@ -958,7 +1061,8 @@ async def modify_cloud_to_device_config( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([name, binary_data]): + has_flattened_params = any([name, binary_data]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -983,10 +1087,11 @@ async def modify_cloud_to_device_config( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ResourceExhausted, exceptions.ServiceUnavailable, - exceptions.DeadlineExceeded, ), + deadline=120.0, ), default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, @@ -1017,7 +1122,7 @@ async def list_device_config_versions( configuration in descending order (i.e.: newest first). Args: - request (:class:`~.device_manager.ListDeviceConfigVersionsRequest`): + request (:class:`google.cloud.iot_v1.types.ListDeviceConfigVersionsRequest`): The request object. Request for `ListDeviceConfigVersions`. name (:class:`str`): @@ -1025,6 +1130,7 @@ async def list_device_config_versions( ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1036,13 +1142,14 @@ async def list_device_config_versions( sent along with the request as metadata. Returns: - ~.device_manager.ListDeviceConfigVersionsResponse: - Response for ``ListDeviceConfigVersions``. + google.cloud.iot_v1.types.ListDeviceConfigVersionsResponse: + Response for ListDeviceConfigVersions. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
- if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1065,8 +1172,9 @@ async def list_device_config_versions( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, @@ -1097,13 +1205,14 @@ async def list_device_states( descending order (i.e.: newest first). Args: - request (:class:`~.device_manager.ListDeviceStatesRequest`): + request (:class:`google.cloud.iot_v1.types.ListDeviceStatesRequest`): The request object. Request for `ListDeviceStates`. name (:class:`str`): Required. The name of the device. For example, ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1115,13 +1224,14 @@ async def list_device_states( sent along with the request as metadata. Returns: - ~.device_manager.ListDeviceStatesResponse: - Response for ``ListDeviceStates``. + google.cloud.iot_v1.types.ListDeviceStatesResponse: + Response for ListDeviceStates. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
@@ -1144,8 +1254,9 @@ async def list_device_states( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, @@ -1176,7 +1287,7 @@ async def set_iam_policy( resource. Replaces any existing policy. Args: - request (:class:`~.iam_policy.SetIamPolicyRequest`): + request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. resource (:class:`str`): @@ -1184,6 +1295,7 @@ async def set_iam_policy( policy is being specified. See the operation documentation for the appropriate value for this field. + This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1195,78 +1307,69 @@ async def set_iam_policy( sent along with the request as metadata. Returns: - ~.policy.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([resource]): + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1278,13 +1381,7 @@ async def set_iam_policy( request = iam_policy.SetIamPolicyRequest(**request) elif not request: - request = iam_policy.SetIamPolicyRequest() - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- - if resource is not None: - request.resource = resource + request = iam_policy.SetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1320,7 +1417,7 @@ async def get_iam_policy( not have a policy set. Args: - request (:class:`~.iam_policy.GetIamPolicyRequest`): + request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. resource (:class:`str`): @@ -1328,6 +1425,7 @@ async def get_iam_policy( policy is being requested. See the operation documentation for the appropriate value for this field. + This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1339,78 +1437,69 @@ async def get_iam_policy( sent along with the request as metadata. Returns: - ~.policy.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([resource]): + has_flattened_params = any([resource]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1422,13 +1511,7 @@ async def get_iam_policy( request = iam_policy.GetIamPolicyRequest(**request) elif not request: - request = iam_policy.GetIamPolicyRequest() - - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- - if resource is not None: - request.resource = resource + request = iam_policy.GetIamPolicyRequest(resource=resource,) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1465,7 +1548,7 @@ async def test_iam_permissions( permissions, not a NOT_FOUND error. Args: - request (:class:`~.iam_policy.TestIamPermissionsRequest`): + request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. resource (:class:`str`): @@ -1473,6 +1556,7 @@ async def test_iam_permissions( policy detail is being requested. See the operation documentation for the appropriate value for this field. + This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1481,6 +1565,7 @@ async def test_iam_permissions( Permissions with wildcards (such as '*' or 'storage.*') are not allowed. For more information see `IAM Overview `__. + This corresponds to the ``permissions`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1492,13 +1577,14 @@ async def test_iam_permissions( sent along with the request as metadata. Returns: - ~.iam_policy.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([resource, permissions]): + has_flattened_params = any([resource, permissions]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
@@ -1510,16 +1596,9 @@ async def test_iam_permissions( request = iam_policy.TestIamPermissionsRequest(**request) elif not request: - request = iam_policy.TestIamPermissionsRequest() - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - - if resource is not None: - request.resource = resource - - if permissions: - request.permissions.extend(permissions) + request = iam_policy.TestIamPermissionsRequest( + resource=resource, permissions=permissions, + ) # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1571,19 +1650,21 @@ async def send_command_to_device( from the device. Args: - request (:class:`~.device_manager.SendCommandToDeviceRequest`): + request (:class:`google.cloud.iot_v1.types.SendCommandToDeviceRequest`): The request object. Request for `SendCommandToDevice`. name (:class:`str`): Required. The name of the device. For example, ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. binary_data (:class:`bytes`): Required. The command data to send to the device. + This corresponds to the ``binary_data`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1598,6 +1679,7 @@ async def send_command_to_device( must not have more than 256 characters, and must not contain any MQTT wildcards ("+" or "#") or null characters. + This corresponds to the ``subfolder`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1609,13 +1691,14 @@ async def send_command_to_device( sent along with the request as metadata. Returns: - ~.device_manager.SendCommandToDeviceResponse: - Response for ``SendCommandToDevice``. 
+ google.cloud.iot_v1.types.SendCommandToDeviceResponse: + Response for SendCommandToDevice. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, binary_data, subfolder]): + has_flattened_params = any([name, binary_data, subfolder]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1642,10 +1725,11 @@ async def send_command_to_device( maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ResourceExhausted, exceptions.ServiceUnavailable, - exceptions.DeadlineExceeded, ), + deadline=120.0, ), default_timeout=120.0, client_info=DEFAULT_CLIENT_INFO, @@ -1677,17 +1761,19 @@ async def bind_device_to_gateway( r"""Associates the device with the gateway. Args: - request (:class:`~.device_manager.BindDeviceToGatewayRequest`): + request (:class:`google.cloud.iot_v1.types.BindDeviceToGatewayRequest`): The request object. Request for `BindDeviceToGateway`. parent (:class:`str`): Required. The name of the registry. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. gateway_id (:class:`str`): Required. The value of ``gateway_id`` can be either the device numeric ID or the user-defined device identifier. + This corresponds to the ``gateway_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1695,6 +1781,7 @@ async def bind_device_to_gateway( Required. The device to associate with the specified gateway. The value of ``device_id`` can be either the device numeric ID or the user-defined device identifier. 
+ This corresponds to the ``device_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1706,13 +1793,14 @@ async def bind_device_to_gateway( sent along with the request as metadata. Returns: - ~.device_manager.BindDeviceToGatewayResponse: - Response for ``BindDeviceToGateway``. + google.cloud.iot_v1.types.BindDeviceToGatewayResponse: + Response for BindDeviceToGateway. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, gateway_id, device_id]): + has_flattened_params = any([parent, gateway_id, device_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1765,18 +1853,20 @@ async def unbind_device_from_gateway( gateway. Args: - request (:class:`~.device_manager.UnbindDeviceFromGatewayRequest`): + request (:class:`google.cloud.iot_v1.types.UnbindDeviceFromGatewayRequest`): The request object. Request for `UnbindDeviceFromGateway`. parent (:class:`str`): Required. The name of the registry. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. gateway_id (:class:`str`): Required. The value of ``gateway_id`` can be either the device numeric ID or the user-defined device identifier. + This corresponds to the ``gateway_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1784,6 +1874,7 @@ async def unbind_device_from_gateway( Required. The device to disassociate from the specified gateway. The value of ``device_id`` can be either the device numeric ID or the user-defined device identifier. 
+ This corresponds to the ``device_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1795,13 +1886,14 @@ async def unbind_device_from_gateway( sent along with the request as metadata. Returns: - ~.device_manager.UnbindDeviceFromGatewayResponse: - Response for ``UnbindDeviceFromGateway``. + google.cloud.iot_v1.types.UnbindDeviceFromGatewayResponse: + Response for UnbindDeviceFromGateway. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, gateway_id, device_id]): + has_flattened_params = any([parent, gateway_id, device_id]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." diff --git a/google/cloud/iot_v1/services/device_manager/client.py b/google/cloud/iot_v1/services/device_manager/client.py index 3432432e..a91d5137 100644 --- a/google/cloud/iot_v1/services/device_manager/client.py +++ b/google/cloud/iot_v1/services/device_manager/client.py @@ -16,17 +16,19 @@ # from collections import OrderedDict +from distutils import util import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # 
type: ignore from google.oauth2 import service_account # type: ignore @@ -114,6 +116,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DeviceManagerClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -126,7 +144,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + DeviceManagerClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -134,6 +152,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @property + def transport(self) -> DeviceManagerTransport: + """Return the transport used by the client instance. + + Returns: + DeviceManagerTransport: The transport used by the client instance. 
+ """ + return self._transport + @staticmethod def device_path(project: str, location: str, registry: str, device: str,) -> str: """Return a fully-qualified device string.""" @@ -166,12 +193,71 @@ def parse_registry_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def common_billing_account_path(billing_account: str,) -> str: + """Return a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str,) -> str: + """Return a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder,) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str,) -> str: + """Return a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization,) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str,) -> str: + """Return a fully-qualified project string.""" + return "projects/{project}".format(project=project,) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def 
common_location_path(project: str, location: str,) -> str: + """Return a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + def __init__( self, *, - credentials: credentials.Credentials = None, - transport: Union[str, DeviceManagerTransport] = None, - client_options: ClientOptions = None, + credentials: Optional[credentials.Credentials] = None, + transport: Union[str, DeviceManagerTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the device manager client. @@ -182,26 +268,29 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.DeviceManagerTransport]): The + transport (Union[str, DeviceManagerTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -209,29 +298,43 @@ def __init__( creation failed for any reason. 
""" if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) - if client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": - client_options.api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": - client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - has_client_cert_source = ( - client_options.client_cert_source is not None - or mtls.has_default_client_cert_source() - ) - client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT - if has_client_cert_source - else self.DEFAULT_ENDPOINT + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" ) # Save or instantiate the transport. 
@@ -255,10 +358,9 @@ def __init__( self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, - host=client_options.api_endpoint, + host=api_endpoint, scopes=client_options.scopes, - api_mtls_endpoint=client_options.api_endpoint, - client_cert_source=client_options.client_cert_source, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -276,20 +378,22 @@ def create_device_registry( r"""Creates a device registry that contains devices. Args: - request (:class:`~.device_manager.CreateDeviceRegistryRequest`): + request (google.cloud.iot_v1.types.CreateDeviceRegistryRequest): The request object. Request for `CreateDeviceRegistry`. - parent (:class:`str`): + parent (str): Required. The project and cloud region where this device registry must be created. For example, ``projects/example-project/locations/us-central1``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - device_registry (:class:`~.resources.DeviceRegistry`): + device_registry (google.cloud.iot_v1.types.DeviceRegistry): Required. The device registry. The field ``name`` must be empty. The server will generate that field from the device registry ``id`` provided and the ``parent`` field. + This corresponds to the ``device_registry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -301,7 +405,7 @@ def create_device_registry( sent along with the request as metadata. Returns: - ~.resources.DeviceRegistry: + google.cloud.iot_v1.types.DeviceRegistry: A container for a group of devices. """ # Create or coerce a protobuf request object. @@ -357,11 +461,12 @@ def get_device_registry( r"""Gets a device registry configuration. Args: - request (:class:`~.device_manager.GetDeviceRegistryRequest`): + request (google.cloud.iot_v1.types.GetDeviceRegistryRequest): The request object. 
Request for `GetDeviceRegistry`. - name (:class:`str`): + name (str): Required. The name of the device registry. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -373,7 +478,7 @@ def get_device_registry( sent along with the request as metadata. Returns: - ~.resources.DeviceRegistry: + google.cloud.iot_v1.types.DeviceRegistry: A container for a group of devices. """ # Create or coerce a protobuf request object. @@ -428,23 +533,25 @@ def update_device_registry( r"""Updates a device registry configuration. Args: - request (:class:`~.device_manager.UpdateDeviceRegistryRequest`): + request (google.cloud.iot_v1.types.UpdateDeviceRegistryRequest): The request object. Request for `UpdateDeviceRegistry`. - device_registry (:class:`~.resources.DeviceRegistry`): + device_registry (google.cloud.iot_v1.types.DeviceRegistry): Required. The new values for the device registry. The ``id`` field must be empty, and the ``name`` field must indicate the path of the resource. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. + This corresponds to the ``device_registry`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Only updates the ``device_registry`` fields indicated by this mask. The field mask must not be empty, and it must not contain fields that are immutable or only set by the server. Mutable top-level fields: ``event_notification_config``, ``http_config``, ``mqtt_config``, and ``state_notification_config``. + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -456,7 +563,7 @@ def update_device_registry( sent along with the request as metadata. 
Returns: - ~.resources.DeviceRegistry: + google.cloud.iot_v1.types.DeviceRegistry: A container for a group of devices. """ # Create or coerce a protobuf request object. @@ -514,11 +621,12 @@ def delete_device_registry( r"""Deletes a device registry configuration. Args: - request (:class:`~.device_manager.DeleteDeviceRegistryRequest`): + request (google.cloud.iot_v1.types.DeleteDeviceRegistryRequest): The request object. Request for `DeleteDeviceRegistry`. - name (:class:`str`): + name (str): Required. The name of the device registry. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -579,12 +687,13 @@ def list_device_registries( r"""Lists device registries. Args: - request (:class:`~.device_manager.ListDeviceRegistriesRequest`): + request (google.cloud.iot_v1.types.ListDeviceRegistriesRequest): The request object. Request for `ListDeviceRegistries`. - parent (:class:`str`): + parent (str): Required. The project and cloud region path. For example, ``projects/example-project/locations/us-central1``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -596,8 +705,8 @@ def list_device_registries( sent along with the request as metadata. Returns: - ~.pagers.ListDeviceRegistriesPager: - Response for ``ListDeviceRegistries``. + google.cloud.iot_v1.services.device_manager.pagers.ListDeviceRegistriesPager: + Response for ListDeviceRegistries. Iterating over this object will yield results and resolve additional pages automatically. @@ -661,20 +770,22 @@ def create_device( r"""Creates a device in a device registry. Args: - request (:class:`~.device_manager.CreateDeviceRequest`): + request (google.cloud.iot_v1.types.CreateDeviceRequest): The request object. Request for `CreateDevice`. - parent (:class:`str`): + parent (str): Required. 
The name of the device registry where this device should be created. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - device (:class:`~.resources.Device`): + device (google.cloud.iot_v1.types.Device): Required. The device registration details. The field ``name`` must be empty. The server generates ``name`` from the device registry ``id`` and the ``parent`` field. + This corresponds to the ``device`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -686,7 +797,7 @@ def create_device( sent along with the request as metadata. Returns: - ~.resources.Device: + google.cloud.iot_v1.types.Device: The device resource. """ # Create or coerce a protobuf request object. @@ -742,13 +853,14 @@ def get_device( r"""Gets details about a device. Args: - request (:class:`~.device_manager.GetDeviceRequest`): + request (google.cloud.iot_v1.types.GetDeviceRequest): The request object. Request for `GetDevice`. - name (:class:`str`): + name (str): Required. The name of the device. For example, ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -760,7 +872,7 @@ def get_device( sent along with the request as metadata. Returns: - ~.resources.Device: + google.cloud.iot_v1.types.Device: The device resource. """ # Create or coerce a protobuf request object. @@ -815,23 +927,25 @@ def update_device( r"""Updates a device. Args: - request (:class:`~.device_manager.UpdateDeviceRequest`): + request (google.cloud.iot_v1.types.UpdateDeviceRequest): The request object. Request for `UpdateDevice`. 
- device (:class:`~.resources.Device`): + device (google.cloud.iot_v1.types.Device): Required. The new values for the device. The ``id`` and ``num_id`` fields must be empty, and the field ``name`` must specify the name path. For example, ``projects/p0/locations/us-central1/registries/registry0/devices/device0``\ or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + This corresponds to the ``device`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - update_mask (:class:`~.field_mask.FieldMask`): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Only updates the ``device`` fields indicated by this mask. The field mask must not be empty, and it must not contain fields that are immutable or only set by the server. Mutable top-level fields: ``credentials``, ``blocked``, and ``metadata`` + This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -843,7 +957,7 @@ def update_device( sent along with the request as metadata. Returns: - ~.resources.Device: + google.cloud.iot_v1.types.Device: The device resource. """ # Create or coerce a protobuf request object. @@ -901,13 +1015,14 @@ def delete_device( r"""Deletes a device. Args: - request (:class:`~.device_manager.DeleteDeviceRequest`): + request (google.cloud.iot_v1.types.DeleteDeviceRequest): The request object. Request for `DeleteDevice`. - name (:class:`str`): + name (str): Required. The name of the device. For example, ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -968,12 +1083,13 @@ def list_devices( r"""List devices in a device registry. 
Args: - request (:class:`~.device_manager.ListDevicesRequest`): + request (google.cloud.iot_v1.types.ListDevicesRequest): The request object. Request for `ListDevices`. - parent (:class:`str`): + parent (str): Required. The device registry path. Required. For example, ``projects/my-project/locations/us-central1/registries/my-registry``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -985,8 +1101,8 @@ def list_devices( sent along with the request as metadata. Returns: - ~.pagers.ListDevicesPager: - Response for ``ListDevices``. + google.cloud.iot_v1.services.device_manager.pagers.ListDevicesPager: + Response for ListDevices. Iterating over this object will yield results and resolve additional pages automatically. @@ -1052,20 +1168,22 @@ def modify_cloud_to_device_config( the modified configuration version and its metadata. Args: - request (:class:`~.device_manager.ModifyCloudToDeviceConfigRequest`): + request (google.cloud.iot_v1.types.ModifyCloudToDeviceConfigRequest): The request object. Request for `ModifyCloudToDeviceConfig`. - name (:class:`str`): + name (str): Required. The name of the device. For example, ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - binary_data (:class:`bytes`): + binary_data (bytes): Required. The configuration data for the device. + This corresponds to the ``binary_data`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1077,7 +1195,7 @@ def modify_cloud_to_device_config( sent along with the request as metadata. Returns: - ~.resources.DeviceConfig: + google.cloud.iot_v1.types.DeviceConfig: The device configuration. Eventually delivered to devices. 
@@ -1138,14 +1256,15 @@ def list_device_config_versions( configuration in descending order (i.e.: newest first). Args: - request (:class:`~.device_manager.ListDeviceConfigVersionsRequest`): + request (google.cloud.iot_v1.types.ListDeviceConfigVersionsRequest): The request object. Request for `ListDeviceConfigVersions`. - name (:class:`str`): + name (str): Required. The name of the device. For example, ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1157,8 +1276,8 @@ def list_device_config_versions( sent along with the request as metadata. Returns: - ~.device_manager.ListDeviceConfigVersionsResponse: - Response for ``ListDeviceConfigVersions``. + google.cloud.iot_v1.types.ListDeviceConfigVersionsResponse: + Response for ListDeviceConfigVersions. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -1214,13 +1333,14 @@ def list_device_states( descending order (i.e.: newest first). Args: - request (:class:`~.device_manager.ListDeviceStatesRequest`): + request (google.cloud.iot_v1.types.ListDeviceStatesRequest): The request object. Request for `ListDeviceStates`. - name (:class:`str`): + name (str): Required. The name of the device. For example, ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1232,8 +1352,8 @@ def list_device_states( sent along with the request as metadata. Returns: - ~.device_manager.ListDeviceStatesResponse: - Response for ``ListDeviceStates``. + google.cloud.iot_v1.types.ListDeviceStatesResponse: + Response for ListDeviceStates. 
""" # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -1287,14 +1407,15 @@ def set_iam_policy( resource. Replaces any existing policy. Args: - request (:class:`~.iam_policy.SetIamPolicyRequest`): + request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): The request object. Request message for `SetIamPolicy` method. - resource (:class:`str`): + resource (str): REQUIRED: The resource for which the policy is being specified. See the operation documentation for the appropriate value for this field. + This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1306,72 +1427,62 @@ def set_iam_policy( sent along with the request as metadata. Returns: - ~.policy.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). """ # Create or coerce a protobuf request object. @@ -1384,17 +1495,14 @@ def set_iam_policy( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy.SetIamPolicyRequest(**request) - elif not request: + # Null request, just make one. request = iam_policy.SetIamPolicyRequest() - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if resource is not None: request.resource = resource @@ -1428,14 +1536,15 @@ def get_iam_policy( not have a policy set. 
Args: - request (:class:`~.iam_policy.GetIamPolicyRequest`): + request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): The request object. Request message for `GetIamPolicy` method. - resource (:class:`str`): + resource (str): REQUIRED: The resource for which the policy is being requested. See the operation documentation for the appropriate value for this field. + This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1447,72 +1556,62 @@ def get_iam_policy( sent along with the request as metadata. Returns: - ~.policy.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). """ # Create or coerce a protobuf request object. @@ -1525,17 +1624,14 @@ def get_iam_policy( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy.GetIamPolicyRequest(**request) - elif not request: + # Null request, just make one. request = iam_policy.GetIamPolicyRequest() - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if resource is not None: request.resource = resource @@ -1570,22 +1666,24 @@ def test_iam_permissions( permissions, not a NOT_FOUND error. 
Args: - request (:class:`~.iam_policy.TestIamPermissionsRequest`): + request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): The request object. Request message for `TestIamPermissions` method. - resource (:class:`str`): + resource (str): REQUIRED: The resource for which the policy detail is being requested. See the operation documentation for the appropriate value for this field. + This corresponds to the ``resource`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - permissions (:class:`Sequence[str]`): + permissions (Sequence[str]): The set of permissions to check for the ``resource``. Permissions with wildcards (such as '*' or 'storage.*') are not allowed. For more information see `IAM Overview `__. + This corresponds to the ``permissions`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1597,8 +1695,8 @@ def test_iam_permissions( sent along with the request as metadata. Returns: - ~.iam_policy.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -1610,17 +1708,14 @@ def test_iam_permissions( "the individual field arguments should be set." ) - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy.TestIamPermissionsRequest(**request) - elif not request: + # Null request, just make one. request = iam_policy.TestIamPermissionsRequest() - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if resource is not None: request.resource = resource @@ -1673,23 +1768,25 @@ def send_command_to_device( from the device. Args: - request (:class:`~.device_manager.SendCommandToDeviceRequest`): + request (google.cloud.iot_v1.types.SendCommandToDeviceRequest): The request object. Request for `SendCommandToDevice`. - name (:class:`str`): + name (str): Required. The name of the device. For example, ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - binary_data (:class:`bytes`): + binary_data (bytes): Required. The command data to send to the device. + This corresponds to the ``binary_data`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - subfolder (:class:`str`): + subfolder (str): Optional subfolder for the command. If empty, the command will be delivered to the /devices/{device-id}/commands @@ -1700,6 +1797,7 @@ def send_command_to_device( must not have more than 256 characters, and must not contain any MQTT wildcards ("+" or "#") or null characters. + This corresponds to the ``subfolder`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1711,8 +1809,8 @@ def send_command_to_device( sent along with the request as metadata. Returns: - ~.device_manager.SendCommandToDeviceResponse: - Response for ``SendCommandToDevice``. + google.cloud.iot_v1.types.SendCommandToDeviceResponse: + Response for SendCommandToDevice. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -1771,24 +1869,27 @@ def bind_device_to_gateway( r"""Associates the device with the gateway. 
Args: - request (:class:`~.device_manager.BindDeviceToGatewayRequest`): + request (google.cloud.iot_v1.types.BindDeviceToGatewayRequest): The request object. Request for `BindDeviceToGateway`. - parent (:class:`str`): + parent (str): Required. The name of the registry. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - gateway_id (:class:`str`): + gateway_id (str): Required. The value of ``gateway_id`` can be either the device numeric ID or the user-defined device identifier. + This corresponds to the ``gateway_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - device_id (:class:`str`): + device_id (str): Required. The device to associate with the specified gateway. The value of ``device_id`` can be either the device numeric ID or the user-defined device identifier. + This corresponds to the ``device_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1800,8 +1901,8 @@ def bind_device_to_gateway( sent along with the request as metadata. Returns: - ~.device_manager.BindDeviceToGatewayResponse: - Response for ``BindDeviceToGateway``. + google.cloud.iot_v1.types.BindDeviceToGatewayResponse: + Response for BindDeviceToGateway. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -1861,25 +1962,28 @@ def unbind_device_from_gateway( gateway. Args: - request (:class:`~.device_manager.UnbindDeviceFromGatewayRequest`): + request (google.cloud.iot_v1.types.UnbindDeviceFromGatewayRequest): The request object. Request for `UnbindDeviceFromGateway`. - parent (:class:`str`): + parent (str): Required. The name of the registry. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. 
+ This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - gateway_id (:class:`str`): + gateway_id (str): Required. The value of ``gateway_id`` can be either the device numeric ID or the user-defined device identifier. + This corresponds to the ``gateway_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - device_id (:class:`str`): + device_id (str): Required. The device to disassociate from the specified gateway. The value of ``device_id`` can be either the device numeric ID or the user-defined device identifier. + This corresponds to the ``device_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1891,8 +1995,8 @@ def unbind_device_from_gateway( sent along with the request as metadata. Returns: - ~.device_manager.UnbindDeviceFromGatewayResponse: - Response for ``UnbindDeviceFromGateway``. + google.cloud.iot_v1.types.UnbindDeviceFromGatewayResponse: + Response for UnbindDeviceFromGateway. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have diff --git a/google/cloud/iot_v1/services/device_manager/pagers.py b/google/cloud/iot_v1/services/device_manager/pagers.py index c3db7418..d60374c1 100644 --- a/google/cloud/iot_v1/services/device_manager/pagers.py +++ b/google/cloud/iot_v1/services/device_manager/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.iot_v1.types import device_manager from google.cloud.iot_v1.types import resources @@ -25,7 +34,7 @@ class ListDeviceRegistriesPager: """A pager for iterating through ``list_device_registries`` requests. 
This class thinly wraps an initial - :class:`~.device_manager.ListDeviceRegistriesResponse` object, and + :class:`google.cloud.iot_v1.types.ListDeviceRegistriesResponse` object, and provides an ``__iter__`` method to iterate through its ``device_registries`` field. @@ -34,7 +43,7 @@ class ListDeviceRegistriesPager: through the ``device_registries`` field on the corresponding responses. - All the usual :class:`~.device_manager.ListDeviceRegistriesResponse` + All the usual :class:`google.cloud.iot_v1.types.ListDeviceRegistriesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -52,9 +61,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.device_manager.ListDeviceRegistriesRequest`): + request (google.cloud.iot_v1.types.ListDeviceRegistriesRequest): The initial request object. - response (:class:`~.device_manager.ListDeviceRegistriesResponse`): + response (google.cloud.iot_v1.types.ListDeviceRegistriesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -87,7 +96,7 @@ class ListDeviceRegistriesAsyncPager: """A pager for iterating through ``list_device_registries`` requests. This class thinly wraps an initial - :class:`~.device_manager.ListDeviceRegistriesResponse` object, and + :class:`google.cloud.iot_v1.types.ListDeviceRegistriesResponse` object, and provides an ``__aiter__`` method to iterate through its ``device_registries`` field. @@ -96,7 +105,7 @@ class ListDeviceRegistriesAsyncPager: through the ``device_registries`` field on the corresponding responses. - All the usual :class:`~.device_manager.ListDeviceRegistriesResponse` + All the usual :class:`google.cloud.iot_v1.types.ListDeviceRegistriesResponse` attributes are available on the pager. 
If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -114,9 +123,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.device_manager.ListDeviceRegistriesRequest`): + request (google.cloud.iot_v1.types.ListDeviceRegistriesRequest): The initial request object. - response (:class:`~.device_manager.ListDeviceRegistriesResponse`): + response (google.cloud.iot_v1.types.ListDeviceRegistriesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -153,7 +162,7 @@ class ListDevicesPager: """A pager for iterating through ``list_devices`` requests. This class thinly wraps an initial - :class:`~.device_manager.ListDevicesResponse` object, and + :class:`google.cloud.iot_v1.types.ListDevicesResponse` object, and provides an ``__iter__`` method to iterate through its ``devices`` field. @@ -162,7 +171,7 @@ class ListDevicesPager: through the ``devices`` field on the corresponding responses. - All the usual :class:`~.device_manager.ListDevicesResponse` + All the usual :class:`google.cloud.iot_v1.types.ListDevicesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -180,9 +189,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.device_manager.ListDevicesRequest`): + request (google.cloud.iot_v1.types.ListDevicesRequest): The initial request object. - response (:class:`~.device_manager.ListDevicesResponse`): + response (google.cloud.iot_v1.types.ListDevicesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
@@ -215,7 +224,7 @@ class ListDevicesAsyncPager: """A pager for iterating through ``list_devices`` requests. This class thinly wraps an initial - :class:`~.device_manager.ListDevicesResponse` object, and + :class:`google.cloud.iot_v1.types.ListDevicesResponse` object, and provides an ``__aiter__`` method to iterate through its ``devices`` field. @@ -224,7 +233,7 @@ class ListDevicesAsyncPager: through the ``devices`` field on the corresponding responses. - All the usual :class:`~.device_manager.ListDevicesResponse` + All the usual :class:`google.cloud.iot_v1.types.ListDevicesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -242,9 +251,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.device_manager.ListDevicesRequest`): + request (google.cloud.iot_v1.types.ListDevicesRequest): The initial request object. - response (:class:`~.device_manager.ListDevicesResponse`): + response (google.cloud.iot_v1.types.ListDevicesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
diff --git a/google/cloud/iot_v1/services/device_manager/transports/__init__.py b/google/cloud/iot_v1/services/device_manager/transports/__init__.py index c1e5e505..0bb0d90f 100644 --- a/google/cloud/iot_v1/services/device_manager/transports/__init__.py +++ b/google/cloud/iot_v1/services/device_manager/transports/__init__.py @@ -28,7 +28,6 @@ _transport_registry["grpc"] = DeviceManagerGrpcTransport _transport_registry["grpc_asyncio"] = DeviceManagerGrpcAsyncIOTransport - __all__ = ( "DeviceManagerTransport", "DeviceManagerGrpcTransport", diff --git a/google/cloud/iot_v1/services/device_manager/transports/base.py b/google/cloud/iot_v1/services/device_manager/transports/base.py index 5236e9a7..68114f2a 100644 --- a/google/cloud/iot_v1/services/device_manager/transports/base.py +++ b/google/cloud/iot_v1/services/device_manager/transports/base.py @@ -74,10 +74,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -85,6 +85,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: @@ -94,20 +97,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { @@ -123,8 +123,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=client_info, @@ -141,8 +142,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=client_info, @@ -154,8 +156,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=client_info, @@ -170,8 +173,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + 
deadline=120.0, ), default_timeout=120.0, client_info=client_info, @@ -186,8 +190,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=client_info, @@ -199,8 +204,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=client_info, @@ -212,10 +218,11 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ResourceExhausted, exceptions.ServiceUnavailable, - exceptions.DeadlineExceeded, ), + deadline=120.0, ), default_timeout=120.0, client_info=client_info, @@ -227,8 +234,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=client_info, @@ -240,8 +248,9 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( - exceptions.ServiceUnavailable, exceptions.DeadlineExceeded, + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, ), + deadline=120.0, ), default_timeout=120.0, client_info=client_info, @@ -264,10 +273,11 @@ def _prep_wrapped_messages(self, client_info): maximum=60.0, multiplier=1.3, predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ResourceExhausted, exceptions.ServiceUnavailable, - exceptions.DeadlineExceeded, ), + deadline=120.0, 
), default_timeout=120.0, client_info=client_info, diff --git a/google/cloud/iot_v1/services/device_manager/transports/grpc.py b/google/cloud/iot_v1/services/device_manager/transports/grpc.py index 30bd43a2..559138cd 100644 --- a/google/cloud/iot_v1/services/device_manager/transports/grpc.py +++ b/google/cloud/iot_v1/services/device_manager/transports/grpc.py @@ -15,6 +15,7 @@ # limitations under the License. # +import warnings from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore @@ -23,7 +24,6 @@ from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - import grpc # type: ignore from google.cloud.iot_v1.types import device_manager @@ -61,6 +61,8 @@ def __init__( channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -81,20 +83,26 @@ def __init__( ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. 
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -103,57 +111,70 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. 
self._grpc_channel = channel - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) - - self._stubs = {} # type: Dict[str, Callable] + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # Run the base constructor. 
+ # The base transport sets the host, credentials and scopes super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, + scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, ) + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + @classmethod def create_channel( cls, @@ -166,7 +187,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optionsl[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -201,19 +222,8 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - - # Return the channel from cache. 
return self._grpc_channel @property diff --git a/google/cloud/iot_v1/services/device_manager/transports/grpc_asyncio.py b/google/cloud/iot_v1/services/device_manager/transports/grpc_asyncio.py index fc84f7aa..21fef140 100644 --- a/google/cloud/iot_v1/services/device_manager/transports/grpc_asyncio.py +++ b/google/cloud/iot_v1/services/device_manager/transports/grpc_asyncio.py @@ -15,10 +15,12 @@ # limitations under the License. # +import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -64,7 +66,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -103,6 +105,8 @@ def __init__( channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -124,20 +128,26 @@ def __init__( are passed to :func:`google.auth.default`. channel (Optional[aio.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -146,51 +156,69 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. 
""" + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel - elif api_mtls_endpoint: - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - ) + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # Run the base constructor. 
+ # The base transport sets the host, credentials and scopes super().__init__( host=host, credentials=credentials, credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, + scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, ) - self._stubs = {} + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: @@ -199,13 +227,6 @@ def grpc_channel(self) -> aio.Channel: This property caches on the instance; repeated calls return the same channel. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - # Return the channel from cache. return self._grpc_channel diff --git a/google/cloud/iot_v1/types/__init__.py b/google/cloud/iot_v1/types/__init__.py index 37160f38..797937dd 100644 --- a/google/cloud/iot_v1/types/__init__.py +++ b/google/cloud/iot_v1/types/__init__.py @@ -15,87 +15,100 @@ # limitations under the License. 
# -from .resources import ( - Device, - GatewayConfig, - DeviceRegistry, - MqttConfig, - HttpConfig, - EventNotificationConfig, - StateNotificationConfig, - RegistryCredential, - X509CertificateDetails, - PublicKeyCertificate, - DeviceCredential, - PublicKeyCredential, - DeviceConfig, - DeviceState, -) from .device_manager import ( + BindDeviceToGatewayRequest, + BindDeviceToGatewayResponse, CreateDeviceRegistryRequest, - GetDeviceRegistryRequest, - DeleteDeviceRegistryRequest, - UpdateDeviceRegistryRequest, - ListDeviceRegistriesRequest, - ListDeviceRegistriesResponse, CreateDeviceRequest, - GetDeviceRequest, - UpdateDeviceRequest, + DeleteDeviceRegistryRequest, DeleteDeviceRequest, - ListDevicesRequest, GatewayListOptions, - ListDevicesResponse, - ModifyCloudToDeviceConfigRequest, + GetDeviceRegistryRequest, + GetDeviceRequest, ListDeviceConfigVersionsRequest, ListDeviceConfigVersionsResponse, + ListDeviceRegistriesRequest, + ListDeviceRegistriesResponse, + ListDevicesRequest, + ListDevicesResponse, ListDeviceStatesRequest, ListDeviceStatesResponse, + ModifyCloudToDeviceConfigRequest, SendCommandToDeviceRequest, SendCommandToDeviceResponse, - BindDeviceToGatewayRequest, - BindDeviceToGatewayResponse, UnbindDeviceFromGatewayRequest, UnbindDeviceFromGatewayResponse, + UpdateDeviceRegistryRequest, + UpdateDeviceRequest, +) +from .resources import ( + Device, + DeviceConfig, + DeviceCredential, + DeviceRegistry, + DeviceState, + EventNotificationConfig, + GatewayConfig, + HttpConfig, + MqttConfig, + PublicKeyCertificate, + PublicKeyCredential, + RegistryCredential, + StateNotificationConfig, + X509CertificateDetails, + GatewayAuthMethod, + GatewayType, + HttpState, + LogLevel, + MqttState, + PublicKeyCertificateFormat, + PublicKeyFormat, ) - __all__ = ( - "Device", - "GatewayConfig", - "DeviceRegistry", - "MqttConfig", - "HttpConfig", - "EventNotificationConfig", - "StateNotificationConfig", - "RegistryCredential", - "X509CertificateDetails", - 
"PublicKeyCertificate", - "DeviceCredential", - "PublicKeyCredential", - "DeviceConfig", - "DeviceState", + "BindDeviceToGatewayRequest", + "BindDeviceToGatewayResponse", "CreateDeviceRegistryRequest", - "GetDeviceRegistryRequest", - "DeleteDeviceRegistryRequest", - "UpdateDeviceRegistryRequest", - "ListDeviceRegistriesRequest", - "ListDeviceRegistriesResponse", "CreateDeviceRequest", - "GetDeviceRequest", - "UpdateDeviceRequest", + "DeleteDeviceRegistryRequest", "DeleteDeviceRequest", - "ListDevicesRequest", "GatewayListOptions", - "ListDevicesResponse", - "ModifyCloudToDeviceConfigRequest", + "GetDeviceRegistryRequest", + "GetDeviceRequest", "ListDeviceConfigVersionsRequest", "ListDeviceConfigVersionsResponse", + "ListDeviceRegistriesRequest", + "ListDeviceRegistriesResponse", + "ListDevicesRequest", + "ListDevicesResponse", "ListDeviceStatesRequest", "ListDeviceStatesResponse", + "ModifyCloudToDeviceConfigRequest", "SendCommandToDeviceRequest", "SendCommandToDeviceResponse", - "BindDeviceToGatewayRequest", - "BindDeviceToGatewayResponse", "UnbindDeviceFromGatewayRequest", "UnbindDeviceFromGatewayResponse", + "UpdateDeviceRegistryRequest", + "UpdateDeviceRequest", + "Device", + "DeviceConfig", + "DeviceCredential", + "DeviceRegistry", + "DeviceState", + "EventNotificationConfig", + "GatewayConfig", + "HttpConfig", + "MqttConfig", + "PublicKeyCertificate", + "PublicKeyCredential", + "RegistryCredential", + "StateNotificationConfig", + "X509CertificateDetails", + "GatewayAuthMethod", + "GatewayType", + "HttpState", + "LogLevel", + "MqttState", + "PublicKeyCertificateFormat", + "PublicKeyFormat", ) diff --git a/google/cloud/iot_v1/types/device_manager.py b/google/cloud/iot_v1/types/device_manager.py index d118cb24..e53586a4 100644 --- a/google/cloud/iot_v1/types/device_manager.py +++ b/google/cloud/iot_v1/types/device_manager.py @@ -61,7 +61,7 @@ class CreateDeviceRegistryRequest(proto.Message): Required. 
The project and cloud region where this device registry must be created. For example, ``projects/example-project/locations/us-central1``. - device_registry (~.resources.DeviceRegistry): + device_registry (google.cloud.iot_v1.types.DeviceRegistry): Required. The device registry. The field ``name`` must be empty. The server will generate that field from the device registry ``id`` provided and the ``parent`` field. @@ -102,12 +102,12 @@ class UpdateDeviceRegistryRequest(proto.Message): r"""Request for ``UpdateDeviceRegistry``. Attributes: - device_registry (~.resources.DeviceRegistry): + device_registry (google.cloud.iot_v1.types.DeviceRegistry): Required. The new values for the device registry. The ``id`` field must be empty, and the ``name`` field must indicate the path of the resource. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. - update_mask (~.gp_field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Only updates the ``device_registry`` fields indicated by this mask. The field mask must not be empty, and it must not contain fields that are immutable or only @@ -154,7 +154,7 @@ class ListDeviceRegistriesResponse(proto.Message): r"""Response for ``ListDeviceRegistries``. Attributes: - device_registries (Sequence[~.resources.DeviceRegistry]): + device_registries (Sequence[google.cloud.iot_v1.types.DeviceRegistry]): The registries that matched the query. next_page_token (str): If not empty, indicates that there may be more registries @@ -181,7 +181,7 @@ class CreateDeviceRequest(proto.Message): Required. The name of the device registry where this device should be created. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. - device (~.resources.Device): + device (google.cloud.iot_v1.types.Device): Required. The device registration details. The field ``name`` must be empty. 
The server generates ``name`` from the device registry ``id`` and the ``parent`` field. @@ -201,10 +201,11 @@ class GetDeviceRequest(proto.Message): ``projects/p0/locations/us-central1/registries/registry0/devices/device0`` or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. - field_mask (~.gp_field_mask.FieldMask): + field_mask (google.protobuf.field_mask_pb2.FieldMask): The fields of the ``Device`` resource to be returned in the response. If the field mask is unset or empty, all fields - are returned. + are returned. Fields have to be provided in snake_case + format, for example: ``last_heartbeat_time``. """ name = proto.Field(proto.STRING, number=1) @@ -216,13 +217,13 @@ class UpdateDeviceRequest(proto.Message): r"""Request for ``UpdateDevice``. Attributes: - device (~.resources.Device): + device (google.cloud.iot_v1.types.Device): Required. The new values for the device. The ``id`` and ``num_id`` fields must be empty, and the field ``name`` must specify the name path. For example, ``projects/p0/locations/us-central1/registries/registry0/devices/device0``\ or ``projects/p0/locations/us-central1/registries/registry0/devices/{num_id}``. - update_mask (~.gp_field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required. Only updates the ``device`` fields indicated by this mask. The field mask must not be empty, and it must not contain fields that are immutable or only set by the server. @@ -263,11 +264,12 @@ class ListDevicesRequest(proto.Message): A list of device string IDs. For example, ``['device0', 'device12']``. If empty, this field is ignored. Maximum IDs: 10,000 - field_mask (~.gp_field_mask.FieldMask): + field_mask (google.protobuf.field_mask_pb2.FieldMask): The fields of the ``Device`` resource to be returned in the response. The fields ``id`` and ``num_id`` are always - returned, along with any other fields specified. 
- gateway_list_options (~.device_manager.GatewayListOptions): + returned, along with any other fields specified in + snake_case format, for example: ``last_heartbeat_time``. + gateway_list_options (google.cloud.iot_v1.types.GatewayListOptions): Options related to gateways. page_size (int): The maximum number of devices to return in the response. If @@ -304,7 +306,7 @@ class GatewayListOptions(proto.Message): associations. Attributes: - gateway_type (~.resources.GatewayType): + gateway_type (google.cloud.iot_v1.types.GatewayType): If ``GATEWAY`` is specified, only gateways are returned. If ``NON_GATEWAY`` is specified, only non-gateway devices are returned. If ``GATEWAY_TYPE_UNSPECIFIED`` is specified, all @@ -336,7 +338,7 @@ class ListDevicesResponse(proto.Message): r"""Response for ``ListDevices``. Attributes: - devices (Sequence[~.resources.Device]): + devices (Sequence[google.cloud.iot_v1.types.Device]): The devices that match the request. next_page_token (str): If not empty, indicates that there may be more devices that @@ -409,7 +411,7 @@ class ListDeviceConfigVersionsResponse(proto.Message): r"""Response for ``ListDeviceConfigVersions``. Attributes: - device_configs (Sequence[~.resources.DeviceConfig]): + device_configs (Sequence[google.cloud.iot_v1.types.DeviceConfig]): The device configuration for the last few versions. Versions are listed in decreasing order, starting from the most recent one. @@ -446,7 +448,7 @@ class ListDeviceStatesResponse(proto.Message): r"""Response for ``ListDeviceStates``. Attributes: - device_states (Sequence[~.resources.DeviceState]): + device_states (Sequence[google.cloud.iot_v1.types.DeviceState]): The last few device states. States are listed in descending order of server update time, starting from the most recent one. 
diff --git a/google/cloud/iot_v1/types/resources.py b/google/cloud/iot_v1/types/resources.py index 8ca12ae3..cd4091ec 100644 --- a/google/cloud/iot_v1/types/resources.py +++ b/google/cloud/iot_v1/types/resources.py @@ -135,7 +135,7 @@ class Device(proto.Message): [Output only] A server-defined unique numeric ID for the device. This is a more compact way to identify devices, and it is globally unique. - credentials (Sequence[~.resources.DeviceCredential]): + credentials (Sequence[google.cloud.iot_v1.types.DeviceCredential]): The credentials used to authenticate this device. To allow credential rotation without interruption, multiple device credentials can be bound to this device. No more than 3 @@ -143,26 +143,26 @@ class Device(proto.Message): new credentials are added to a device, they are verified against the registry credentials. For details, see the description of the ``DeviceRegistry.credentials`` field. - last_heartbeat_time (~.timestamp.Timestamp): + last_heartbeat_time (google.protobuf.timestamp_pb2.Timestamp): [Output only] The last time an MQTT ``PINGREQ`` was received. This field applies only to devices connecting through MQTT. MQTT clients usually only send ``PINGREQ`` messages if the connection is idle, and no other messages have been sent. Timestamps are periodically collected and written to storage; they may be stale by a few minutes. - last_event_time (~.timestamp.Timestamp): + last_event_time (google.protobuf.timestamp_pb2.Timestamp): [Output only] The last time a telemetry event was received. Timestamps are periodically collected and written to storage; they may be stale by a few minutes. - last_state_time (~.timestamp.Timestamp): + last_state_time (google.protobuf.timestamp_pb2.Timestamp): [Output only] The last time a state event was received. Timestamps are periodically collected and written to storage; they may be stale by a few minutes. 
- last_config_ack_time (~.timestamp.Timestamp): + last_config_ack_time (google.protobuf.timestamp_pb2.Timestamp): [Output only] The last time a cloud-to-device config version acknowledgment was received from the device. This field is only for configurations sent through MQTT. - last_config_send_time (~.timestamp.Timestamp): + last_config_send_time (google.protobuf.timestamp_pb2.Timestamp): [Output only] The last time a cloud-to-device config version was sent to the device. blocked (bool): @@ -171,34 +171,34 @@ class Device(proto.Message): to temporarily prevent the device from connecting if, for example, the sensor is generating bad data and needs maintenance. - last_error_time (~.timestamp.Timestamp): + last_error_time (google.protobuf.timestamp_pb2.Timestamp): [Output only] The time the most recent error occurred, such as a failure to publish to Cloud Pub/Sub. This field is the timestamp of 'last_error_status'. - last_error_status (~.status.Status): + last_error_status (google.rpc.status_pb2.Status): [Output only] The error message of the most recent error, such as a failure to publish to Cloud Pub/Sub. 'last_error_time' is the timestamp of this field. If no errors have occurred, this field has an empty message and the status code 0 == OK. Otherwise, this field is expected to have a status code other than OK. - config (~.resources.DeviceConfig): + config (google.cloud.iot_v1.types.DeviceConfig): The most recent device configuration, which is eventually sent from Cloud IoT Core to the device. If not present on creation, the configuration will be initialized with an empty payload and version value of ``1``. To update this field after creation, use the ``DeviceManager.ModifyCloudToDeviceConfig`` method. - state (~.resources.DeviceState): + state (google.cloud.iot_v1.types.DeviceState): [Output only] The state most recently received from the device. If no state has been reported, this field is not present. 
- log_level (~.resources.LogLevel): + log_level (google.cloud.iot_v1.types.LogLevel): **Beta Feature** The logging verbosity for device activity. If unspecified, DeviceRegistry.log_level will be used. - metadata (Sequence[~.resources.Device.MetadataEntry]): + metadata (Sequence[google.cloud.iot_v1.types.Device.MetadataEntry]): The metadata key-value pairs assigned to the device. This metadata is not interpreted or indexed by Cloud IoT Core. It can be used to add contextual information for the device. @@ -212,7 +212,7 @@ class Device(proto.Message): The total size of all keys and values must be less than 256 KB, and the maximum number of key-value pairs is 500. - gateway_config (~.resources.GatewayConfig): + gateway_config (google.cloud.iot_v1.types.GatewayConfig): Gateway-related configuration and state. """ @@ -267,15 +267,15 @@ class GatewayConfig(proto.Message): r"""Gateway-related configuration and state. Attributes: - gateway_type (~.resources.GatewayType): + gateway_type (google.cloud.iot_v1.types.GatewayType): Indicates whether the device is a gateway. - gateway_auth_method (~.resources.GatewayAuthMethod): + gateway_auth_method (google.cloud.iot_v1.types.GatewayAuthMethod): Indicates how to authorize and/or authenticate devices to access the gateway. last_accessed_gateway_id (str): [Output only] The ID of the gateway the device accessed most recently. - last_accessed_gateway_time (~.timestamp.Timestamp): + last_accessed_gateway_time (google.protobuf.timestamp_pb2.Timestamp): [Output only] The most recent time at which the device accessed the gateway specified in ``last_accessed_gateway``. """ @@ -301,7 +301,7 @@ class DeviceRegistry(proto.Message): name (str): The resource path name. For example, ``projects/example-project/locations/us-central1/registries/my-registry``. 
- event_notification_configs (Sequence[~.resources.EventNotificationConfig]): + event_notification_configs (Sequence[google.cloud.iot_v1.types.EventNotificationConfig]): The configuration for notification of telemetry events received from the device. All telemetry events that were successfully @@ -316,7 +316,7 @@ class DeviceRegistry(proto.Message): to do so using an HTTP connection, an error is returned. Up to 10 configurations may be provided. - state_notification_config (~.resources.StateNotificationConfig): + state_notification_config (google.cloud.iot_v1.types.StateNotificationConfig): The configuration for notification of new states received from the device. State updates are guaranteed to be stored in the state @@ -325,19 +325,19 @@ class DeviceRegistry(proto.Message): misconfigured or the specified topic doesn't exist, no notification will be published but the state will still be stored in Cloud IoT Core. - mqtt_config (~.resources.MqttConfig): + mqtt_config (google.cloud.iot_v1.types.MqttConfig): The MQTT configuration for this device registry. - http_config (~.resources.HttpConfig): + http_config (google.cloud.iot_v1.types.HttpConfig): The DeviceService (HTTP) configuration for this device registry. - log_level (~.resources.LogLevel): + log_level (google.cloud.iot_v1.types.LogLevel): **Beta Feature** The default logging verbosity for activity from devices in this registry. The verbosity level can be overridden by Device.log_level. - credentials (Sequence[~.resources.RegistryCredential]): + credentials (Sequence[google.cloud.iot_v1.types.RegistryCredential]): The credentials used to verify the device credentials. No more than 10 credentials can be bound to a single registry at a time. The @@ -383,7 +383,7 @@ class MqttConfig(proto.Message): r"""The configuration of MQTT for a device registry. 
Attributes: - mqtt_enabled_state (~.resources.MqttState): + mqtt_enabled_state (google.cloud.iot_v1.types.MqttState): If enabled, allows connections using the MQTT protocol. Otherwise, MQTT connections to this registry will fail. @@ -396,7 +396,7 @@ class HttpConfig(proto.Message): r"""The configuration of the HTTP bridge for a device registry. Attributes: - http_enabled_state (~.resources.HttpState): + http_enabled_state (google.cloud.iot_v1.types.HttpState): If enabled, allows devices to use DeviceService via the HTTP protocol. Otherwise, any requests to DeviceService will fail for this @@ -445,7 +445,7 @@ class RegistryCredential(proto.Message): credentials. Attributes: - public_key_certificate (~.resources.PublicKeyCertificate): + public_key_certificate (google.cloud.iot_v1.types.PublicKeyCertificate): A public key certificate used to verify the device credentials. """ @@ -465,9 +465,9 @@ class X509CertificateDetails(proto.Message): subject (str): The entity the certificate and public key belong to. - start_time (~.timestamp.Timestamp): + start_time (google.protobuf.timestamp_pb2.Timestamp): The time the certificate becomes valid. - expiry_time (~.timestamp.Timestamp): + expiry_time (google.protobuf.timestamp_pb2.Timestamp): The time the certificate becomes invalid. signature_algorithm (str): The algorithm used to sign the certificate. @@ -492,11 +492,11 @@ class PublicKeyCertificate(proto.Message): r"""A public key certificate format and data. Attributes: - format (~.resources.PublicKeyCertificateFormat): + format (google.cloud.iot_v1.types.PublicKeyCertificateFormat): The certificate format. certificate (str): The certificate data. - x509_details (~.resources.X509CertificateDetails): + x509_details (google.cloud.iot_v1.types.X509CertificateDetails): [Output only] The certificate details. Used only for X.509 certificates. 
""" @@ -505,14 +505,16 @@ class PublicKeyCertificate(proto.Message): certificate = proto.Field(proto.STRING, number=2) - x509_details = proto.Field(proto.MESSAGE, number=3, message=X509CertificateDetails,) + x509_details = proto.Field( + proto.MESSAGE, number=3, message="X509CertificateDetails", + ) class DeviceCredential(proto.Message): r"""A server-stored device credential used for authentication. Attributes: - public_key (~.resources.PublicKeyCredential): + public_key (google.cloud.iot_v1.types.PublicKeyCredential): A public key used to verify the signature of JSON Web Tokens (JWTs). When adding a new device credential, either via device creation or via @@ -529,7 +531,7 @@ class DeviceCredential(proto.Message): keys will be accepted. New device credentials must be different from every registry-level certificate. - expiration_time (~.timestamp.Timestamp): + expiration_time (google.protobuf.timestamp_pb2.Timestamp): [Optional] The time at which this credential becomes invalid. This credential will be ignored for new client authentication requests after this timestamp; however, it @@ -547,7 +549,7 @@ class PublicKeyCredential(proto.Message): r"""A public key format and data. Attributes: - format (~.resources.PublicKeyFormat): + format (google.cloud.iot_v1.types.PublicKeyFormat): The format of the key. key (str): The key data. @@ -568,11 +570,11 @@ class DeviceConfig(proto.Message): after device creation. The version must be 0 on the ``CreateDevice`` request if a ``config`` is specified; the response of ``CreateDevice`` will always have a value of 1. - cloud_update_time (~.timestamp.Timestamp): + cloud_update_time (google.protobuf.timestamp_pb2.Timestamp): [Output only] The time at which this configuration version was updated in Cloud IoT Core. This timestamp is set by the server. 
- device_ack_time (~.timestamp.Timestamp): + device_ack_time (google.protobuf.timestamp_pb2.Timestamp): [Output only] The time at which Cloud IoT Core received the acknowledgment from the device, indicating that the device has received this configuration version. If this field is @@ -603,7 +605,7 @@ class DeviceState(proto.Message): r"""The device state, as reported by the device. Attributes: - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): [Output only] The time at which this state version was updated in Cloud IoT Core. binary_data (bytes): diff --git a/noxfile.py b/noxfile.py index 132eded7..4d37cd3a 100644 --- a/noxfile.py +++ b/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -28,7 +29,23 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -70,18 +87,23 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. - session.install("asyncmock", "pytest-asyncio") - session.install("mock", "pytest", "pytest-cov") - session.install("-e", ".") + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) + + session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. 
session.run( "py.test", "--quiet", - "--cov=google.cloud.cloudiot", - "--cov=google.cloud", - "--cov=tests.unit", + f"--junitxml=unit_{session.python}_sponge_log.xml", + "--cov=google/cloud", + "--cov=tests/unit", "--cov-append", "--cov-config=.coveragerc", "--cov-report=", @@ -100,6 +122,9 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -109,6 +134,9 @@ def system(session): # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) @@ -121,16 +149,26 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install( - "mock", "pytest", "google-cloud-testutils", - ) - session.install("-e", ".") + session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. 
if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -141,7 +179,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=99") + session.run("coverage", "report", "--show-missing", "--fail-under=98") session.run("coverage", "erase") @@ -151,7 +189,7 @@ def docs(session): """Build the docs for this library.""" session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") + session.install("sphinx", "alabaster", "recommonmark") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( @@ -173,7 +211,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/renovate.json b/renovate.json index 4fa94931..f08bc22c 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,6 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "ignorePaths": [".pre-commit-config.yaml"] } diff --git a/samples/api-client/manager/noxfile.py b/samples/api-client/manager/noxfile.py index 5660f08b..97bf7da8 100644 --- a/samples/api-client/manager/noxfile.py +++ b/samples/api-client/manager/noxfile.py @@ -17,6 
+17,7 @@ import os from pathlib import Path import sys +from typing import Callable, Dict, List, Optional import nox @@ -37,22 +38,28 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + 'enforce_type_hints': False, + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -62,26 +69,26 @@ TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) -def get_pytest_env_vars(): +def get_pytest_env_vars() -> Dict[str, str]: """Returns a dict for pytest invocation.""" ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret # DO NOT EDIT - automatically generated. # All versions used to tested samples. -ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] +ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8", "3.9"] # Any default versions that should be ignored. 
-IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -91,7 +98,7 @@ def get_pytest_env_vars(): # -def _determine_local_import_names(start_dir): +def _determine_local_import_names(start_dir: str) -> List[str]: """Determines all import names that should be considered "local". This is used when running the linter to insure that import order is @@ -129,17 +136,30 @@ def _determine_local_import_names(start_dir): @nox.session -def lint(session): - session.install("flake8", "flake8-import-order") +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG['enforce_type_hints']: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." ] session.run("flake8", *args) +# +# Black +# + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install("black") + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) # # Sample Tests @@ -149,7 +169,7 @@ def lint(session): PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] -def _session_tests(session, post_install=None): +def _session_tests(session: nox.sessions.Session, post_install: Callable = None) -> None: """Runs py.test for a particular project.""" if os.path.exists("requirements.txt"): session.install("-r", "requirements.txt") @@ -175,14 +195,14 @@ def _session_tests(session, post_install=None): @nox.session(python=ALL_VERSIONS) -def py(session): +def py(session: nox.sessions.Session) -> None: """Runs py.test for a sample using the specified version of Python.""" if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this 
sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # @@ -190,7 +210,7 @@ def py(session): # -def _get_repo_root(): +def _get_repo_root() -> Optional[str]: """ Returns the root folder of the project. """ # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) @@ -199,6 +219,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") @@ -208,7 +233,7 @@ def _get_repo_root(): @nox.session @nox.parametrize("path", GENERATED_READMES) -def readmegen(session, path): +def readmegen(session: nox.sessions.Session, path: str) -> None: """(Re-)generates the readme for a sample.""" session.install("jinja2", "pyyaml") dir_ = os.path.dirname(path) diff --git a/scripts/fixup_iot_v1_keywords.py b/scripts/fixup_iot_v1_keywords.py index 1c0e0056..5339bf3d 100644 --- a/scripts/fixup_iot_v1_keywords.py +++ b/scripts/fixup_iot_v1_keywords.py @@ -1,3 +1,4 @@ +#! 
/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
diff --git a/setup.py b/setup.py
index c9a70042..49db6f94 100644
--- a/setup.py
+++ b/setup.py
@@ -22,12 +22,13 @@
 version = "2.0.2"
 release_status = "Development Status :: 5 - Production/Stable"
 dependencies = [
-    "google-api-core[grpc] >= 1.22.0, < 2.0.0dev",
+    "google-api-core[grpc] >= 1.22.2, < 2.0.0dev",
     "grpc-google-iam-v1 >= 0.12.3, < 0.13dev",
     "proto-plus >= 1.4.0",
-    "libcst >= 0.2.5",
 ]
 
+extras = {"libcst": "libcst >= 0.2.5"}
+
 package_root = os.path.abspath(os.path.dirname(__file__))
 
 readme_filename = os.path.join(package_root, "README.rst")
@@ -69,6 +70,7 @@
     packages=packages,
     namespace_packages=namespaces,
     install_requires=dependencies,
+    extras_require=extras,
     python_requires=">=3.6",
     scripts=["scripts/fixup_iot_v1_keywords.py"],
     include_package_data=True,
diff --git a/synth.metadata b/synth.metadata
index a0e77355..104efaa9 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -3,22 +3,30 @@
     {
       "git": {
         "name": ".",
-        "remote": "https://github.com/googleapis/python-iot.git",
-        "sha": "43f1f1a2b00d2e808afc12f08f674f4232de043d"
+        "remote": "git@github.com:googleapis/python-iot.git",
+        "sha": "f32b86c65627987ca4155a95699a619fefd3285c"
+      }
+    },
+    {
+      "git": {
+        "name": "googleapis",
+        "remote": "https://github.com/googleapis/googleapis.git",
+        "sha": "8ff7d794576311d3d68d4df2ac6da93bbfcd7476",
+        "internalRef": "366472163"
       }
     },
     {
       "git": {
         "name": "synthtool",
         "remote": "https://github.com/googleapis/synthtool.git",
-        "sha": "d302f93d7f47e2852e585ac35ab2d15585717ec0"
+        "sha": "ff39353f34a36e7643b86e97724e4027ab466dc6"
       }
     },
     {
       "git": {
         "name": "synthtool",
         "remote": "https://github.com/googleapis/synthtool.git",
-        "sha": "d302f93d7f47e2852e585ac35ab2d15585717ec0"
+        "sha": "ff39353f34a36e7643b86e97724e4027ab466dc6"
       }
     }
   ],
diff --git a/synth.py b/synth.py
index 213f17c7..4c114e62 100644
--- a/synth.py
+++ b/synth.py
@@ -38,16 +38,21 @@
 templated_files = common.py_library(
     samples=True,
microgenerator=True, - cov_level=99, + cov_level=98, ) s.move(templated_files, excludes=[".coveragerc"]) # microgenerator has a good .coveragerc file +# Rename `format_` to `format` to avoid breaking change +s.replace( + "google/cloud/**/types/resources.py", + "format_", + "format" +) + # ---------------------------------------------------------------------------- # Samples templates # ---------------------------------------------------------------------------- python.py_samples() -# TODO(busunkim): Use latest sphinx after microgenerator transition -s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"') s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 761e75e9..2686dc3f 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -1,11 +1,11 @@ # This constraints file is used to check that lower bounds # are correct in setup.py -# List *all* library dependencies and extras in this file. +# List all library dependencies and extras in this file. # Pin the version to the lower bound. 
-#
-# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
-# Then this file should have foo==1.14.0
-google-api-core==1.22.0
-grpc-google-iam-v1==0.12.3
+
+# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev",
+# Then this file should have google-cloud-foo==1.14.0
+google-api-core==1.22.2
 proto-plus==1.4.0
-libcst==0.2.5
\ No newline at end of file
+grpc-google-iam-v1==0.12.3
+libcst==0.2.5
diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt
index e69de29b..da93009b 100644
--- a/testing/constraints-3.7.txt
+++ b/testing/constraints-3.7.txt
@@ -0,0 +1,2 @@
+# This constraints file is left intentionally empty
+# so the latest version of dependencies is installed
\ No newline at end of file
diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt
index e69de29b..da93009b 100644
--- a/testing/constraints-3.8.txt
+++ b/testing/constraints-3.8.txt
@@ -0,0 +1,2 @@
+# This constraints file is left intentionally empty
+# so the latest version of dependencies is installed
\ No newline at end of file
diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt
index e69de29b..da93009b 100644
--- a/testing/constraints-3.9.txt
+++ b/testing/constraints-3.9.txt
@@ -0,0 +1,2 @@
+# This constraints file is left intentionally empty
+# so the latest version of dependencies is installed
\ No newline at end of file
diff --git a/tests/unit/gapic/iot_v1/__init__.py b/tests/unit/gapic/iot_v1/__init__.py
index 8b137891..42ffdf2b 100644
--- a/tests/unit/gapic/iot_v1/__init__.py
+++ b/tests/unit/gapic/iot_v1/__init__.py
@@ -1 +1,16 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/iot_v1/test_device_manager.py b/tests/unit/gapic/iot_v1/test_device_manager.py index 2827a327..e6ba6207 100644 --- a/tests/unit/gapic/iot_v1/test_device_manager.py +++ b/tests/unit/gapic/iot_v1/test_device_manager.py @@ -42,7 +42,7 @@ from google.iam.v1 import options_pb2 as options # type: ignore from google.iam.v1 import policy_pb2 as policy # type: ignore from google.oauth2 import service_account -from google.protobuf import any_pb2 as any # type: ignore +from google.protobuf import any_pb2 as gp_any # type: ignore from google.protobuf import field_mask_pb2 as field_mask # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore from google.rpc import status_pb2 as status # type: ignore @@ -94,7 +94,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [DeviceManagerClient, DeviceManagerAsyncClient] + "client_class", [DeviceManagerClient, DeviceManagerAsyncClient,] +) +def test_device_manager_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "cloudiot.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [DeviceManagerClient, DeviceManagerAsyncClient,] ) def 
test_device_manager_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -103,17 +120,22 @@ def test_device_manager_client_from_service_account_file(client_class): ) as factory: factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") - assert client._transport._credentials == creds + assert client.transport._credentials == creds + assert isinstance(client, client_class) - assert client._transport._host == "cloudiot.googleapis.com:443" + assert client.transport._host == "cloudiot.googleapis.com:443" def test_device_manager_client_get_transport_class(): transport = DeviceManagerClient.get_transport_class() - assert transport == transports.DeviceManagerGrpcTransport + available_transports = [ + transports.DeviceManagerGrpcTransport, + ] + assert transport in available_transports transport = DeviceManagerClient.get_transport_class("grpc") assert transport == transports.DeviceManagerGrpcTransport @@ -164,15 +186,14 @@ def test_device_manager_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -181,15 +202,14 @@ def test_device_manager_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class() @@ -198,56 +218,142 @@ def test_device_manager_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (DeviceManagerClient, transports.DeviceManagerGrpcTransport, "grpc", "true"), + ( + DeviceManagerAsyncClient, + transports.DeviceManagerGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (DeviceManagerClient, transports.DeviceManagerGrpcTransport, "grpc", "false"), + ( + DeviceManagerAsyncClient, + transports.DeviceManagerGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + DeviceManagerClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DeviceManagerClient), +) +@mock.patch.object( + DeviceManagerAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DeviceManagerAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_device_manager_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): options = client_options.ClientOptions( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, + host=expected_host, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", return_value=True, ): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( "google.auth.transport.mtls.has_default_client_cert_source", @@ -260,34 +366,11 @@ def test_device_manager_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -313,8 +396,7 @@ def test_device_manager_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -344,8 +426,7 @@ def test_device_manager_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, + 
client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -364,8 +445,7 @@ def test_device_manager_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - api_mtls_endpoint="squid.clam.whelk", - client_cert_source=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -384,7 +464,7 @@ def test_create_device_registry( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_device_registry), "__call__" + type(client.transport.create_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DeviceRegistry( @@ -400,6 +480,7 @@ def test_create_device_registry( assert args[0] == device_manager.CreateDeviceRegistryRequest() # Establish that the response is the type that we expect. + assert isinstance(response, resources.DeviceRegistry) assert response.id == "id_value" @@ -413,19 +494,40 @@ def test_create_device_registry_from_dict(): test_create_device_registry(request_type=dict) +def test_create_device_registry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_device_registry), "__call__" + ) as call: + client.create_device_registry() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.CreateDeviceRegistryRequest() + + @pytest.mark.asyncio -async def test_create_device_registry_async(transport: str = "grpc_asyncio"): +async def test_create_device_registry_async( + transport: str = "grpc_asyncio", + request_type=device_manager.CreateDeviceRegistryRequest, +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = device_manager.CreateDeviceRegistryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_device_registry), "__call__" + type(client.transport.create_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -440,7 +542,7 @@ async def test_create_device_registry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.CreateDeviceRegistryRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.DeviceRegistry) @@ -452,6 +554,11 @@ async def test_create_device_registry_async(transport: str = "grpc_asyncio"): assert response.log_level == resources.LogLevel.NONE +@pytest.mark.asyncio +async def test_create_device_registry_async_from_dict(): + await test_create_device_registry_async(request_type=dict) + + def test_create_device_registry_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -462,7 +569,7 @@ def test_create_device_registry_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_device_registry), "__call__" + type(client.transport.create_device_registry), "__call__" ) as call: call.return_value = resources.DeviceRegistry() @@ -489,7 +596,7 @@ async def test_create_device_registry_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_device_registry), "__call__" + type(client.transport.create_device_registry), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DeviceRegistry() @@ -512,7 +619,7 @@ def test_create_device_registry_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_device_registry), "__call__" + type(client.transport.create_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DeviceRegistry() @@ -553,7 +660,7 @@ async def test_create_device_registry_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_device_registry), "__call__" + type(client.transport.create_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = resources.DeviceRegistry() @@ -605,7 +712,7 @@ def test_get_device_registry( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_device_registry), "__call__" + type(client.transport.get_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DeviceRegistry( @@ -621,6 +728,7 @@ def test_get_device_registry( assert args[0] == device_manager.GetDeviceRegistryRequest() # Establish that the response is the type that we expect. + assert isinstance(response, resources.DeviceRegistry) assert response.id == "id_value" @@ -634,19 +742,40 @@ def test_get_device_registry_from_dict(): test_get_device_registry(request_type=dict) +def test_get_device_registry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_device_registry), "__call__" + ) as call: + client.get_device_registry() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.GetDeviceRegistryRequest() + + @pytest.mark.asyncio -async def test_get_device_registry_async(transport: str = "grpc_asyncio"): +async def test_get_device_registry_async( + transport: str = "grpc_asyncio", + request_type=device_manager.GetDeviceRegistryRequest, +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = device_manager.GetDeviceRegistryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_device_registry), "__call__" + type(client.transport.get_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -661,7 +790,7 @@ async def test_get_device_registry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.GetDeviceRegistryRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.DeviceRegistry) @@ -673,6 +802,11 @@ async def test_get_device_registry_async(transport: str = "grpc_asyncio"): assert response.log_level == resources.LogLevel.NONE +@pytest.mark.asyncio +async def test_get_device_registry_async_from_dict(): + await test_get_device_registry_async(request_type=dict) + + def test_get_device_registry_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -683,7 +817,7 @@ def test_get_device_registry_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_device_registry), "__call__" + type(client.transport.get_device_registry), "__call__" ) as call: call.return_value = resources.DeviceRegistry() @@ -710,7 +844,7 @@ async def test_get_device_registry_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.get_device_registry), "__call__" + type(client.transport.get_device_registry), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DeviceRegistry() @@ -733,7 +867,7 @@ def test_get_device_registry_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_device_registry), "__call__" + type(client.transport.get_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DeviceRegistry() @@ -767,7 +901,7 @@ async def test_get_device_registry_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_device_registry), "__call__" + type(client.transport.get_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DeviceRegistry() @@ -812,7 +946,7 @@ def test_update_device_registry( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_device_registry), "__call__" + type(client.transport.update_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DeviceRegistry( @@ -828,6 +962,7 @@ def test_update_device_registry( assert args[0] == device_manager.UpdateDeviceRegistryRequest() # Establish that the response is the type that we expect. + assert isinstance(response, resources.DeviceRegistry) assert response.id == "id_value" @@ -841,19 +976,40 @@ def test_update_device_registry_from_dict(): test_update_device_registry(request_type=dict) +def test_update_device_registry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_device_registry), "__call__" + ) as call: + client.update_device_registry() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.UpdateDeviceRegistryRequest() + + @pytest.mark.asyncio -async def test_update_device_registry_async(transport: str = "grpc_asyncio"): +async def test_update_device_registry_async( + transport: str = "grpc_asyncio", + request_type=device_manager.UpdateDeviceRegistryRequest, +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = device_manager.UpdateDeviceRegistryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_device_registry), "__call__" + type(client.transport.update_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -868,7 +1024,7 @@ async def test_update_device_registry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.UpdateDeviceRegistryRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.DeviceRegistry) @@ -880,6 +1036,11 @@ async def test_update_device_registry_async(transport: str = "grpc_asyncio"): assert response.log_level == resources.LogLevel.NONE +@pytest.mark.asyncio +async def test_update_device_registry_async_from_dict(): + await test_update_device_registry_async(request_type=dict) + + def test_update_device_registry_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -890,7 +1051,7 @@ def test_update_device_registry_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_device_registry), "__call__" + type(client.transport.update_device_registry), "__call__" ) as call: call.return_value = resources.DeviceRegistry() @@ -920,7 +1081,7 @@ async def test_update_device_registry_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_device_registry), "__call__" + type(client.transport.update_device_registry), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DeviceRegistry() @@ -946,7 +1107,7 @@ def test_update_device_registry_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_device_registry), "__call__" + type(client.transport.update_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DeviceRegistry() @@ -987,7 +1148,7 @@ async def test_update_device_registry_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_device_registry), "__call__" + type(client.transport.update_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = resources.DeviceRegistry() @@ -1039,7 +1200,7 @@ def test_delete_device_registry( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_device_registry), "__call__" + type(client.transport.delete_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1060,19 +1221,40 @@ def test_delete_device_registry_from_dict(): test_delete_device_registry(request_type=dict) +def test_delete_device_registry_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_device_registry), "__call__" + ) as call: + client.delete_device_registry() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.DeleteDeviceRegistryRequest() + + @pytest.mark.asyncio -async def test_delete_device_registry_async(transport: str = "grpc_asyncio"): +async def test_delete_device_registry_async( + transport: str = "grpc_asyncio", + request_type=device_manager.DeleteDeviceRegistryRequest, +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = device_manager.DeleteDeviceRegistryRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.delete_device_registry), "__call__" + type(client.transport.delete_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1083,12 +1265,17 @@ async def test_delete_device_registry_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.DeleteDeviceRegistryRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_device_registry_async_from_dict(): + await test_delete_device_registry_async(request_type=dict) + + def test_delete_device_registry_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -1099,7 +1286,7 @@ def test_delete_device_registry_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_device_registry), "__call__" + type(client.transport.delete_device_registry), "__call__" ) as call: call.return_value = None @@ -1126,7 +1313,7 @@ async def test_delete_device_registry_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_device_registry), "__call__" + type(client.transport.delete_device_registry), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -1147,7 +1334,7 @@ def test_delete_device_registry_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_device_registry), "__call__" + type(client.transport.delete_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = None @@ -1181,7 +1368,7 @@ async def test_delete_device_registry_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_device_registry), "__call__" + type(client.transport.delete_device_registry), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -1224,7 +1411,7 @@ def test_list_device_registries( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_device_registries), "__call__" + type(client.transport.list_device_registries), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.ListDeviceRegistriesResponse( @@ -1240,6 +1427,7 @@ def test_list_device_registries( assert args[0] == device_manager.ListDeviceRegistriesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeviceRegistriesPager) assert response.next_page_token == "next_page_token_value" @@ -1249,19 +1437,40 @@ def test_list_device_registries_from_dict(): test_list_device_registries(request_type=dict) +def test_list_device_registries_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_device_registries), "__call__" + ) as call: + client.list_device_registries() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.ListDeviceRegistriesRequest() + + @pytest.mark.asyncio -async def test_list_device_registries_async(transport: str = "grpc_asyncio"): +async def test_list_device_registries_async( + transport: str = "grpc_asyncio", + request_type=device_manager.ListDeviceRegistriesRequest, +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = device_manager.ListDeviceRegistriesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_device_registries), "__call__" + type(client.transport.list_device_registries), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1276,7 +1485,7 @@ async def test_list_device_registries_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.ListDeviceRegistriesRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListDeviceRegistriesAsyncPager) @@ -1284,6 +1493,11 @@ async def test_list_device_registries_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_device_registries_async_from_dict(): + await test_list_device_registries_async(request_type=dict) + + def test_list_device_registries_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -1294,7 +1508,7 @@ def test_list_device_registries_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_device_registries), "__call__" + type(client.transport.list_device_registries), "__call__" ) as call: call.return_value = device_manager.ListDeviceRegistriesResponse() @@ -1321,7 +1535,7 @@ async def test_list_device_registries_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_device_registries), "__call__" + type(client.transport.list_device_registries), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( device_manager.ListDeviceRegistriesResponse() @@ -1344,7 +1558,7 @@ def test_list_device_registries_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_device_registries), "__call__" + type(client.transport.list_device_registries), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.ListDeviceRegistriesResponse() @@ -1378,7 +1592,7 @@ async def test_list_device_registries_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.list_device_registries), "__call__" + type(client.transport.list_device_registries), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.ListDeviceRegistriesResponse() @@ -1415,7 +1629,7 @@ def test_list_device_registries_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_device_registries), "__call__" + type(client.transport.list_device_registries), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1460,7 +1674,7 @@ def test_list_device_registries_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_device_registries), "__call__" + type(client.transport.list_device_registries), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1497,7 +1711,7 @@ async def test_list_device_registries_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_device_registries), + type(client.transport.list_device_registries), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1541,7 +1755,7 @@ async def test_list_device_registries_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_device_registries), + type(client.transport.list_device_registries), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1588,7 +1802,7 @@ def test_create_device( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_device), "__call__") as call: + with mock.patch.object(type(client.transport.create_device), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = resources.Device( id="id_value", @@ -1607,6 +1821,7 @@ def test_create_device( assert args[0] == device_manager.CreateDeviceRequest() # Establish that the response is the type that we expect. + assert isinstance(response, resources.Device) assert response.id == "id_value" @@ -1624,20 +1839,36 @@ def test_create_device_from_dict(): test_create_device(request_type=dict) +def test_create_device_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_device), "__call__") as call: + client.create_device() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.CreateDeviceRequest() + + @pytest.mark.asyncio -async def test_create_device_async(transport: str = "grpc_asyncio"): +async def test_create_device_async( + transport: str = "grpc_asyncio", request_type=device_manager.CreateDeviceRequest +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = device_manager.CreateDeviceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_device), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_device), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.Device( @@ -1655,7 +1886,7 @@ async def test_create_device_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.CreateDeviceRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.Device) @@ -1671,6 +1902,11 @@ async def test_create_device_async(transport: str = "grpc_asyncio"): assert response.log_level == resources.LogLevel.NONE +@pytest.mark.asyncio +async def test_create_device_async_from_dict(): + await test_create_device_async(request_type=dict) + + def test_create_device_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -1680,7 +1916,7 @@ def test_create_device_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_device), "__call__") as call: + with mock.patch.object(type(client.transport.create_device), "__call__") as call: call.return_value = resources.Device() client.create_device(request) @@ -1705,9 +1941,7 @@ async def test_create_device_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_device), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_device), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Device()) await client.create_device(request) @@ -1726,7 +1960,7 @@ def test_create_device_flattened(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.create_device), "__call__") as call: + with mock.patch.object(type(client.transport.create_device), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Device() @@ -1764,9 +1998,7 @@ async def test_create_device_flattened_async(): client = DeviceManagerAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_device), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_device), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Device() @@ -1813,7 +2045,7 @@ def test_get_device( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_device), "__call__") as call: + with mock.patch.object(type(client.transport.get_device), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Device( id="id_value", @@ -1832,6 +2064,7 @@ def test_get_device( assert args[0] == device_manager.GetDeviceRequest() # Establish that the response is the type that we expect. + assert isinstance(response, resources.Device) assert response.id == "id_value" @@ -1849,20 +2082,36 @@ def test_get_device_from_dict(): test_get_device(request_type=dict) +def test_get_device_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_device), "__call__") as call: + client.get_device() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.GetDeviceRequest() + + @pytest.mark.asyncio -async def test_get_device_async(transport: str = "grpc_asyncio"): +async def test_get_device_async( + transport: str = "grpc_asyncio", request_type=device_manager.GetDeviceRequest +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = device_manager.GetDeviceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_device), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_device), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.Device( @@ -1880,7 +2129,7 @@ async def test_get_device_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.GetDeviceRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.Device) @@ -1896,6 +2145,11 @@ async def test_get_device_async(transport: str = "grpc_asyncio"): assert response.log_level == resources.LogLevel.NONE +@pytest.mark.asyncio +async def test_get_device_async_from_dict(): + await test_get_device_async(request_type=dict) + + def test_get_device_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -1905,7 +2159,7 @@ def test_get_device_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.get_device), "__call__") as call: + with mock.patch.object(type(client.transport.get_device), "__call__") as call: call.return_value = resources.Device() client.get_device(request) @@ -1930,9 +2184,7 @@ async def test_get_device_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_device), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_device), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Device()) await client.get_device(request) @@ -1951,7 +2203,7 @@ def test_get_device_flattened(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_device), "__call__") as call: + with mock.patch.object(type(client.transport.get_device), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Device() @@ -1983,9 +2235,7 @@ async def test_get_device_flattened_async(): client = DeviceManagerAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_device), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_device), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Device() @@ -2026,7 +2276,7 @@ def test_update_device( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_device), "__call__") as call: + with mock.patch.object(type(client.transport.update_device), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = resources.Device( id="id_value", @@ -2045,6 +2295,7 @@ def test_update_device( assert args[0] == device_manager.UpdateDeviceRequest() # Establish that the response is the type that we expect. + assert isinstance(response, resources.Device) assert response.id == "id_value" @@ -2062,20 +2313,36 @@ def test_update_device_from_dict(): test_update_device(request_type=dict) -@pytest.mark.asyncio -async def test_update_device_async(transport: str = "grpc_asyncio"): - client = DeviceManagerAsyncClient( - credentials=credentials.AnonymousCredentials(), transport=transport, +def test_update_device_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", ) - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = device_manager.UpdateDeviceRequest() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_device), "__call__") as call: + client.update_device() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.UpdateDeviceRequest() + + +@pytest.mark.asyncio +async def test_update_device_async( + transport: str = "grpc_asyncio", request_type=device_manager.UpdateDeviceRequest +): + client = DeviceManagerAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.update_device), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_device), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.Device( @@ -2093,7 +2360,7 @@ async def test_update_device_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.UpdateDeviceRequest() # Establish that the response is the type that we expect. assert isinstance(response, resources.Device) @@ -2109,6 +2376,11 @@ async def test_update_device_async(transport: str = "grpc_asyncio"): assert response.log_level == resources.LogLevel.NONE +@pytest.mark.asyncio +async def test_update_device_async_from_dict(): + await test_update_device_async(request_type=dict) + + def test_update_device_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -2118,7 +2390,7 @@ def test_update_device_field_headers(): request.device.name = "device.name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_device), "__call__") as call: + with mock.patch.object(type(client.transport.update_device), "__call__") as call: call.return_value = resources.Device() client.update_device(request) @@ -2143,9 +2415,7 @@ async def test_update_device_field_headers_async(): request.device.name = "device.name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.update_device), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_device), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Device()) await client.update_device(request) @@ -2164,7 +2434,7 @@ def test_update_device_flattened(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.update_device), "__call__") as call: + with mock.patch.object(type(client.transport.update_device), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Device() @@ -2203,9 +2473,7 @@ async def test_update_device_flattened_async(): client = DeviceManagerAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.update_device), "__call__" - ) as call: + with mock.patch.object(type(client.transport.update_device), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = resources.Device() @@ -2253,7 +2521,7 @@ def test_delete_device( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_device), "__call__") as call: + with mock.patch.object(type(client.transport.delete_device), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2273,20 +2541,36 @@ def test_delete_device_from_dict(): test_delete_device(request_type=dict) +def test_delete_device_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_device), "__call__") as call: + client.delete_device() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.DeleteDeviceRequest() + + @pytest.mark.asyncio -async def test_delete_device_async(transport: str = "grpc_asyncio"): +async def test_delete_device_async( + transport: str = "grpc_asyncio", request_type=device_manager.DeleteDeviceRequest +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = device_manager.DeleteDeviceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_device), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_device), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2296,12 +2580,17 @@ async def test_delete_device_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.DeleteDeviceRequest() # Establish that the response is the type that we expect. 
assert response is None +@pytest.mark.asyncio +async def test_delete_device_async_from_dict(): + await test_delete_device_async(request_type=dict) + + def test_delete_device_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -2311,7 +2600,7 @@ def test_delete_device_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_device), "__call__") as call: + with mock.patch.object(type(client.transport.delete_device), "__call__") as call: call.return_value = None client.delete_device(request) @@ -2336,9 +2625,7 @@ async def test_delete_device_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_device), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_device), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_device(request) @@ -2357,7 +2644,7 @@ def test_delete_device_flattened(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_device), "__call__") as call: + with mock.patch.object(type(client.transport.delete_device), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2389,9 +2676,7 @@ async def test_delete_device_flattened_async(): client = DeviceManagerAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client._client._transport.delete_device), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_device), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2432,7 +2717,7 @@ def test_list_devices( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_devices), "__call__") as call: + with mock.patch.object(type(client.transport.list_devices), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = device_manager.ListDevicesResponse( next_page_token="next_page_token_value", @@ -2447,6 +2732,7 @@ def test_list_devices( assert args[0] == device_manager.ListDevicesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDevicesPager) assert response.next_page_token == "next_page_token_value" @@ -2456,20 +2742,36 @@ def test_list_devices_from_dict(): test_list_devices(request_type=dict) +def test_list_devices_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_devices), "__call__") as call: + client.list_devices() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.ListDevicesRequest() + + @pytest.mark.asyncio -async def test_list_devices_async(transport: str = "grpc_asyncio"): +async def test_list_devices_async( + transport: str = "grpc_asyncio", request_type=device_manager.ListDevicesRequest +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = device_manager.ListDevicesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_devices), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_devices), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( device_manager.ListDevicesResponse(next_page_token="next_page_token_value",) @@ -2481,7 +2783,7 @@ async def test_list_devices_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.ListDevicesRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListDevicesAsyncPager) @@ -2489,6 +2791,11 @@ async def test_list_devices_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_devices_async_from_dict(): + await test_list_devices_async(request_type=dict) + + def test_list_devices_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -2498,7 +2805,7 @@ def test_list_devices_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_devices), "__call__") as call: + with mock.patch.object(type(client.transport.list_devices), "__call__") as call: call.return_value = device_manager.ListDevicesResponse() client.list_devices(request) @@ -2523,9 +2830,7 @@ async def test_list_devices_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_devices), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_devices), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( device_manager.ListDevicesResponse() ) @@ -2546,7 +2851,7 @@ def test_list_devices_flattened(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_devices), "__call__") as call: + with mock.patch.object(type(client.transport.list_devices), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = device_manager.ListDevicesResponse() @@ -2578,9 +2883,7 @@ async def test_list_devices_flattened_async(): client = DeviceManagerAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_devices), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_devices), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = device_manager.ListDevicesResponse() @@ -2615,7 +2918,7 @@ def test_list_devices_pager(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_devices), "__call__") as call: + with mock.patch.object(type(client.transport.list_devices), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( device_manager.ListDevicesResponse( @@ -2649,7 +2952,7 @@ def test_list_devices_pages(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_devices), "__call__") as call: + with mock.patch.object(type(client.transport.list_devices), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( device_manager.ListDevicesResponse( @@ -2676,9 +2979,7 @@ async def test_list_devices_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_devices), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_devices), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. 
call.side_effect = ( @@ -2711,9 +3012,7 @@ async def test_list_devices_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_devices), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_devices), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -2751,7 +3050,7 @@ def test_modify_cloud_to_device_config( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.modify_cloud_to_device_config), "__call__" + type(client.transport.modify_cloud_to_device_config), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DeviceConfig( @@ -2767,6 +3066,7 @@ def test_modify_cloud_to_device_config( assert args[0] == device_manager.ModifyCloudToDeviceConfigRequest() # Establish that the response is the type that we expect. + assert isinstance(response, resources.DeviceConfig) assert response.version == 774 @@ -2778,19 +3078,40 @@ def test_modify_cloud_to_device_config_from_dict(): test_modify_cloud_to_device_config(request_type=dict) +def test_modify_cloud_to_device_config_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.modify_cloud_to_device_config), "__call__" + ) as call: + client.modify_cloud_to_device_config() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.ModifyCloudToDeviceConfigRequest() + + @pytest.mark.asyncio -async def test_modify_cloud_to_device_config_async(transport: str = "grpc_asyncio"): +async def test_modify_cloud_to_device_config_async( + transport: str = "grpc_asyncio", + request_type=device_manager.ModifyCloudToDeviceConfigRequest, +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = device_manager.ModifyCloudToDeviceConfigRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.modify_cloud_to_device_config), "__call__" + type(client.transport.modify_cloud_to_device_config), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2803,7 +3124,7 @@ async def test_modify_cloud_to_device_config_async(transport: str = "grpc_asynci assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.ModifyCloudToDeviceConfigRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, resources.DeviceConfig) @@ -2813,6 +3134,11 @@ async def test_modify_cloud_to_device_config_async(transport: str = "grpc_asynci assert response.binary_data == b"binary_data_blob" +@pytest.mark.asyncio +async def test_modify_cloud_to_device_config_async_from_dict(): + await test_modify_cloud_to_device_config_async(request_type=dict) + + def test_modify_cloud_to_device_config_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -2823,7 +3149,7 @@ def test_modify_cloud_to_device_config_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.modify_cloud_to_device_config), "__call__" + type(client.transport.modify_cloud_to_device_config), "__call__" ) as call: call.return_value = resources.DeviceConfig() @@ -2850,7 +3176,7 @@ async def test_modify_cloud_to_device_config_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.modify_cloud_to_device_config), "__call__" + type(client.transport.modify_cloud_to_device_config), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( resources.DeviceConfig() @@ -2873,7 +3199,7 @@ def test_modify_cloud_to_device_config_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.modify_cloud_to_device_config), "__call__" + type(client.transport.modify_cloud_to_device_config), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DeviceConfig() @@ -2913,7 +3239,7 @@ async def test_modify_cloud_to_device_config_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.modify_cloud_to_device_config), "__call__" + type(client.transport.modify_cloud_to_device_config), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = resources.DeviceConfig() @@ -2964,7 +3290,7 @@ def test_list_device_config_versions( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_device_config_versions), "__call__" + type(client.transport.list_device_config_versions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.ListDeviceConfigVersionsResponse() @@ -2978,6 +3304,7 @@ def test_list_device_config_versions( assert args[0] == device_manager.ListDeviceConfigVersionsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, device_manager.ListDeviceConfigVersionsResponse) @@ -2985,19 +3312,40 @@ def test_list_device_config_versions_from_dict(): test_list_device_config_versions(request_type=dict) +def test_list_device_config_versions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_device_config_versions), "__call__" + ) as call: + client.list_device_config_versions() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.ListDeviceConfigVersionsRequest() + + @pytest.mark.asyncio -async def test_list_device_config_versions_async(transport: str = "grpc_asyncio"): +async def test_list_device_config_versions_async( + transport: str = "grpc_asyncio", + request_type=device_manager.ListDeviceConfigVersionsRequest, +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = device_manager.ListDeviceConfigVersionsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_device_config_versions), "__call__" + type(client.transport.list_device_config_versions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -3010,12 +3358,17 @@ async def test_list_device_config_versions_async(transport: str = "grpc_asyncio" assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.ListDeviceConfigVersionsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, device_manager.ListDeviceConfigVersionsResponse) +@pytest.mark.asyncio +async def test_list_device_config_versions_async_from_dict(): + await test_list_device_config_versions_async(request_type=dict) + + def test_list_device_config_versions_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -3026,7 +3379,7 @@ def test_list_device_config_versions_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_device_config_versions), "__call__" + type(client.transport.list_device_config_versions), "__call__" ) as call: call.return_value = device_manager.ListDeviceConfigVersionsResponse() @@ -3053,7 +3406,7 @@ async def test_list_device_config_versions_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_device_config_versions), "__call__" + type(client.transport.list_device_config_versions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( device_manager.ListDeviceConfigVersionsResponse() @@ -3076,7 +3429,7 @@ def test_list_device_config_versions_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_device_config_versions), "__call__" + type(client.transport.list_device_config_versions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.ListDeviceConfigVersionsResponse() @@ -3110,7 +3463,7 @@ async def test_list_device_config_versions_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_device_config_versions), "__call__" + type(client.transport.list_device_config_versions), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = device_manager.ListDeviceConfigVersionsResponse() @@ -3155,7 +3508,7 @@ def test_list_device_states( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_device_states), "__call__" + type(client.transport.list_device_states), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.ListDeviceStatesResponse() @@ -3169,6 +3522,7 @@ def test_list_device_states( assert args[0] == device_manager.ListDeviceStatesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, device_manager.ListDeviceStatesResponse) @@ -3176,19 +3530,39 @@ def test_list_device_states_from_dict(): test_list_device_states(request_type=dict) +def test_list_device_states_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_device_states), "__call__" + ) as call: + client.list_device_states() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.ListDeviceStatesRequest() + + @pytest.mark.asyncio -async def test_list_device_states_async(transport: str = "grpc_asyncio"): +async def test_list_device_states_async( + transport: str = "grpc_asyncio", request_type=device_manager.ListDeviceStatesRequest +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = device_manager.ListDeviceStatesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_device_states), "__call__" + type(client.transport.list_device_states), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -3201,12 +3575,17 @@ async def test_list_device_states_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.ListDeviceStatesRequest() # Establish that the response is the type that we expect. assert isinstance(response, device_manager.ListDeviceStatesResponse) +@pytest.mark.asyncio +async def test_list_device_states_async_from_dict(): + await test_list_device_states_async(request_type=dict) + + def test_list_device_states_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -3217,7 +3596,7 @@ def test_list_device_states_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_device_states), "__call__" + type(client.transport.list_device_states), "__call__" ) as call: call.return_value = device_manager.ListDeviceStatesResponse() @@ -3244,7 +3623,7 @@ async def test_list_device_states_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_device_states), "__call__" + type(client.transport.list_device_states), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( device_manager.ListDeviceStatesResponse() @@ -3267,7 +3646,7 @@ def test_list_device_states_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.list_device_states), "__call__" + type(client.transport.list_device_states), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.ListDeviceStatesResponse() @@ -3301,7 +3680,7 @@ async def test_list_device_states_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_device_states), "__call__" + type(client.transport.list_device_states), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.ListDeviceStatesResponse() @@ -3345,7 +3724,7 @@ def test_set_iam_policy( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -3358,6 +3737,7 @@ def test_set_iam_policy( assert args[0] == iam_policy.SetIamPolicyRequest() # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) assert response.version == 774 @@ -3369,20 +3749,36 @@ def test_set_iam_policy_from_dict(): test_set_iam_policy(request_type=dict) +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.SetIamPolicyRequest() + + @pytest.mark.asyncio -async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): +async def test_set_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.SetIamPolicyRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy.Policy(version=774, etag=b"etag_blob",) @@ -3394,7 +3790,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == iam_policy.SetIamPolicyRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, policy.Policy) @@ -3404,6 +3800,11 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"): assert response.etag == b"etag_blob" +@pytest.mark.asyncio +async def test_set_iam_policy_async_from_dict(): + await test_set_iam_policy_async(request_type=dict) + + def test_set_iam_policy_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -3413,7 +3814,7 @@ def test_set_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.set_iam_policy(request) @@ -3438,9 +3839,7 @@ async def test_set_iam_policy_field_headers_async(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) await client.set_iam_policy(request) @@ -3455,10 +3854,10 @@ async def test_set_iam_policy_field_headers_async(): assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_set_iam_policy_from_dict(): +def test_set_iam_policy_from_dict_foreign(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policy.Policy() @@ -3475,7 +3874,7 @@ def test_set_iam_policy_flattened(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -3507,9 +3906,7 @@ async def test_set_iam_policy_flattened_async(): client = DeviceManagerAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.set_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -3550,7 +3947,7 @@ def test_get_iam_policy( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy(version=774, etag=b"etag_blob",) @@ -3563,6 +3960,7 @@ def test_get_iam_policy( assert args[0] == iam_policy.GetIamPolicyRequest() # Establish that the response is the type that we expect. + assert isinstance(response, policy.Policy) assert response.version == 774 @@ -3574,20 +3972,36 @@ def test_get_iam_policy_from_dict(): test_get_iam_policy(request_type=dict) +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.GetIamPolicyRequest() + + @pytest.mark.asyncio -async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): +async def test_get_iam_policy_async( + transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.GetIamPolicyRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( policy.Policy(version=774, etag=b"etag_blob",) @@ -3599,7 +4013,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == iam_policy.GetIamPolicyRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, policy.Policy) @@ -3609,6 +4023,11 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"): assert response.etag == b"etag_blob" +@pytest.mark.asyncio +async def test_get_iam_policy_async_from_dict(): + await test_get_iam_policy_async(request_type=dict) + + def test_get_iam_policy_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -3618,7 +4037,7 @@ def test_get_iam_policy_field_headers(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = policy.Policy() client.get_iam_policy(request) @@ -3643,9 +4062,7 @@ async def test_get_iam_policy_field_headers_async(): request.resource = "resource/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy()) await client.get_iam_policy(request) @@ -3660,10 +4077,10 @@ async def test_get_iam_policy_field_headers_async(): assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_get_iam_policy_from_dict(): +def test_get_iam_policy_from_dict_foreign(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = policy.Policy() @@ -3680,7 +4097,7 @@ def test_get_iam_policy_flattened(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -3712,9 +4129,7 @@ async def test_get_iam_policy_flattened_async(): client = DeviceManagerAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_iam_policy), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = policy.Policy() @@ -3756,7 +4171,7 @@ def test_test_iam_permissions( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy.TestIamPermissionsResponse( @@ -3772,6 +4187,7 @@ def test_test_iam_permissions( assert args[0] == iam_policy.TestIamPermissionsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, iam_policy.TestIamPermissionsResponse) assert response.permissions == ["permissions_value"] @@ -3781,19 +4197,39 @@ def test_test_iam_permissions_from_dict(): test_test_iam_permissions(request_type=dict) +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.TestIamPermissionsRequest() + + @pytest.mark.asyncio -async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): +async def test_test_iam_permissions_async( + transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = iam_policy.TestIamPermissionsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -3806,7 +4242,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == iam_policy.TestIamPermissionsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, iam_policy.TestIamPermissionsResponse) @@ -3814,6 +4250,11 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"): assert response.permissions == ["permissions_value"] +@pytest.mark.asyncio +async def test_test_iam_permissions_async_from_dict(): + await test_test_iam_permissions_async(request_type=dict) + + def test_test_iam_permissions_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -3824,7 +4265,7 @@ def test_test_iam_permissions_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = iam_policy.TestIamPermissionsResponse() @@ -3851,7 +4292,7 @@ async def test_test_iam_permissions_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( iam_policy.TestIamPermissionsResponse() @@ -3869,11 +4310,11 @@ async def test_test_iam_permissions_field_headers_async(): assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"] -def test_test_iam_permissions_from_dict(): +def test_test_iam_permissions_from_dict_foreign(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = iam_policy.TestIamPermissionsResponse() @@ -3892,7 +4333,7 @@ def test_test_iam_permissions_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy.TestIamPermissionsResponse() @@ -3932,7 +4373,7 @@ async def test_test_iam_permissions_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.test_iam_permissions), "__call__" + type(client.transport.test_iam_permissions), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = iam_policy.TestIamPermissionsResponse() @@ -3983,7 +4424,7 @@ def test_send_command_to_device( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.send_command_to_device), "__call__" + type(client.transport.send_command_to_device), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.SendCommandToDeviceResponse() @@ -3997,6 +4438,7 @@ def test_send_command_to_device( assert args[0] == device_manager.SendCommandToDeviceRequest() # Establish that the response is the type that we expect. + assert isinstance(response, device_manager.SendCommandToDeviceResponse) @@ -4004,19 +4446,40 @@ def test_send_command_to_device_from_dict(): test_send_command_to_device(request_type=dict) +def test_send_command_to_device_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.send_command_to_device), "__call__" + ) as call: + client.send_command_to_device() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.SendCommandToDeviceRequest() + + @pytest.mark.asyncio -async def test_send_command_to_device_async(transport: str = "grpc_asyncio"): +async def test_send_command_to_device_async( + transport: str = "grpc_asyncio", + request_type=device_manager.SendCommandToDeviceRequest, +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = device_manager.SendCommandToDeviceRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.send_command_to_device), "__call__" + type(client.transport.send_command_to_device), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -4029,12 +4492,17 @@ async def test_send_command_to_device_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.SendCommandToDeviceRequest() # Establish that the response is the type that we expect. assert isinstance(response, device_manager.SendCommandToDeviceResponse) +@pytest.mark.asyncio +async def test_send_command_to_device_async_from_dict(): + await test_send_command_to_device_async(request_type=dict) + + def test_send_command_to_device_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -4045,7 +4513,7 @@ def test_send_command_to_device_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.send_command_to_device), "__call__" + type(client.transport.send_command_to_device), "__call__" ) as call: call.return_value = device_manager.SendCommandToDeviceResponse() @@ -4072,7 +4540,7 @@ async def test_send_command_to_device_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.send_command_to_device), "__call__" + type(client.transport.send_command_to_device), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( device_manager.SendCommandToDeviceResponse() @@ -4095,7 +4563,7 @@ def test_send_command_to_device_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.send_command_to_device), "__call__" + type(client.transport.send_command_to_device), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.SendCommandToDeviceResponse() @@ -4140,7 +4608,7 @@ async def test_send_command_to_device_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.send_command_to_device), "__call__" + type(client.transport.send_command_to_device), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.SendCommandToDeviceResponse() @@ -4196,7 +4664,7 @@ def test_bind_device_to_gateway( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.bind_device_to_gateway), "__call__" + type(client.transport.bind_device_to_gateway), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = device_manager.BindDeviceToGatewayResponse() @@ -4210,6 +4678,7 @@ def test_bind_device_to_gateway( assert args[0] == device_manager.BindDeviceToGatewayRequest() # Establish that the response is the type that we expect. + assert isinstance(response, device_manager.BindDeviceToGatewayResponse) @@ -4217,19 +4686,40 @@ def test_bind_device_to_gateway_from_dict(): test_bind_device_to_gateway(request_type=dict) +def test_bind_device_to_gateway_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bind_device_to_gateway), "__call__" + ) as call: + client.bind_device_to_gateway() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.BindDeviceToGatewayRequest() + + @pytest.mark.asyncio -async def test_bind_device_to_gateway_async(transport: str = "grpc_asyncio"): +async def test_bind_device_to_gateway_async( + transport: str = "grpc_asyncio", + request_type=device_manager.BindDeviceToGatewayRequest, +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = device_manager.BindDeviceToGatewayRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.bind_device_to_gateway), "__call__" + type(client.transport.bind_device_to_gateway), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -4242,12 +4732,17 @@ async def test_bind_device_to_gateway_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.BindDeviceToGatewayRequest() # Establish that the response is the type that we expect. assert isinstance(response, device_manager.BindDeviceToGatewayResponse) +@pytest.mark.asyncio +async def test_bind_device_to_gateway_async_from_dict(): + await test_bind_device_to_gateway_async(request_type=dict) + + def test_bind_device_to_gateway_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -4258,7 +4753,7 @@ def test_bind_device_to_gateway_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.bind_device_to_gateway), "__call__" + type(client.transport.bind_device_to_gateway), "__call__" ) as call: call.return_value = device_manager.BindDeviceToGatewayResponse() @@ -4285,7 +4780,7 @@ async def test_bind_device_to_gateway_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.bind_device_to_gateway), "__call__" + type(client.transport.bind_device_to_gateway), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( device_manager.BindDeviceToGatewayResponse() @@ -4308,7 +4803,7 @@ def test_bind_device_to_gateway_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.bind_device_to_gateway), "__call__" + type(client.transport.bind_device_to_gateway), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = device_manager.BindDeviceToGatewayResponse() @@ -4353,7 +4848,7 @@ async def test_bind_device_to_gateway_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.bind_device_to_gateway), "__call__" + type(client.transport.bind_device_to_gateway), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.BindDeviceToGatewayResponse() @@ -4409,7 +4904,7 @@ def test_unbind_device_from_gateway( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.unbind_device_from_gateway), "__call__" + type(client.transport.unbind_device_from_gateway), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.UnbindDeviceFromGatewayResponse() @@ -4423,6 +4918,7 @@ def test_unbind_device_from_gateway( assert args[0] == device_manager.UnbindDeviceFromGatewayRequest() # Establish that the response is the type that we expect. + assert isinstance(response, device_manager.UnbindDeviceFromGatewayResponse) @@ -4430,19 +4926,40 @@ def test_unbind_device_from_gateway_from_dict(): test_unbind_device_from_gateway(request_type=dict) +def test_unbind_device_from_gateway_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DeviceManagerClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.unbind_device_from_gateway), "__call__" + ) as call: + client.unbind_device_from_gateway() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == device_manager.UnbindDeviceFromGatewayRequest() + + @pytest.mark.asyncio -async def test_unbind_device_from_gateway_async(transport: str = "grpc_asyncio"): +async def test_unbind_device_from_gateway_async( + transport: str = "grpc_asyncio", + request_type=device_manager.UnbindDeviceFromGatewayRequest, +): client = DeviceManagerAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = device_manager.UnbindDeviceFromGatewayRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.unbind_device_from_gateway), "__call__" + type(client.transport.unbind_device_from_gateway), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -4455,12 +4972,17 @@ async def test_unbind_device_from_gateway_async(transport: str = "grpc_asyncio") assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == device_manager.UnbindDeviceFromGatewayRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, device_manager.UnbindDeviceFromGatewayResponse) +@pytest.mark.asyncio +async def test_unbind_device_from_gateway_async_from_dict(): + await test_unbind_device_from_gateway_async(request_type=dict) + + def test_unbind_device_from_gateway_field_headers(): client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) @@ -4471,7 +4993,7 @@ def test_unbind_device_from_gateway_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.unbind_device_from_gateway), "__call__" + type(client.transport.unbind_device_from_gateway), "__call__" ) as call: call.return_value = device_manager.UnbindDeviceFromGatewayResponse() @@ -4498,7 +5020,7 @@ async def test_unbind_device_from_gateway_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.unbind_device_from_gateway), "__call__" + type(client.transport.unbind_device_from_gateway), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( device_manager.UnbindDeviceFromGatewayResponse() @@ -4521,7 +5043,7 @@ def test_unbind_device_from_gateway_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.unbind_device_from_gateway), "__call__" + type(client.transport.unbind_device_from_gateway), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = device_manager.UnbindDeviceFromGatewayResponse() @@ -4566,7 +5088,7 @@ async def test_unbind_device_from_gateway_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.unbind_device_from_gateway), "__call__" + type(client.transport.unbind_device_from_gateway), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = device_manager.UnbindDeviceFromGatewayResponse() @@ -4645,7 +5167,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = DeviceManagerClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -4663,10 +5185,25 @@ def test_transport_get_channel(): assert channel +@pytest.mark.parametrize( + "transport_class", + [ + transports.DeviceManagerGrpcTransport, + transports.DeviceManagerGrpcAsyncIOTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = DeviceManagerClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.DeviceManagerGrpcTransport,) + assert isinstance(client.transport, transports.DeviceManagerGrpcTransport,) def test_device_manager_base_transport_error(): @@ -4738,6 +5275,17 @@ def test_device_manager_base_transport_with_credentials_file(): ) +def test_device_manager_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.iot_v1.services.device_manager.transports.DeviceManagerTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.DeviceManagerTransport() + adc.assert_called_once() + + def test_device_manager_auth_adc(): # If no credentials are provided, we should use ADC credentials. 
with mock.patch.object(auth, "default") as adc: @@ -4769,6 +5317,54 @@ def test_device_manager_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.DeviceManagerGrpcTransport, + transports.DeviceManagerGrpcAsyncIOTransport, + ], +) +def test_device_manager_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloudiot", + ), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_device_manager_host_no_port(): client = DeviceManagerClient( credentials=credentials.AnonymousCredentials(), @@ -4776,7 +5372,7 @@ def test_device_manager_host_no_port(): api_endpoint="cloudiot.googleapis.com" ), ) - assert client._transport._host == "cloudiot.googleapis.com:443" + assert client.transport._host == "cloudiot.googleapis.com:443" def test_device_manager_host_with_port(): @@ -4786,203 +5382,168 @@ def test_device_manager_host_with_port(): api_endpoint="cloudiot.googleapis.com:8000" ), ) - assert client._transport._host == "cloudiot.googleapis.com:8000" + assert client.transport._host == "cloudiot.googleapis.com:8000" def test_device_manager_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. 
transport = transports.DeviceManagerGrpcTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called + assert transport._ssl_channel_credentials == None def test_device_manager_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.DeviceManagerGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_device_manager_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.DeviceManagerGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloudiot", - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel - - -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_device_manager_grpc_asyncio_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. 
- mock_cred = mock.Mock() - - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - transport = transports.DeviceManagerGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloudiot", - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [ + transports.DeviceManagerGrpcTransport, + transports.DeviceManagerGrpcAsyncIOTransport, + ], ) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_device_manager_grpc_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. 
- mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - mock_cred = mock.Mock() - transport = transports.DeviceManagerGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloudiot", - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel +def test_device_manager_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloudiot", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + 
options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( - "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] + "transport_class", + [ + transports.DeviceManagerGrpcTransport, + transports.DeviceManagerGrpcAsyncIOTransport, + ], ) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_device_manager_grpc_asyncio_transport_channel_mtls_with_adc( - grpc_create_channel, api_mtls_endpoint -): - # Check that if channel and client_cert_source are None, but api_mtls_endpoint - # is provided, then a mTLS channel will be created with SSL ADC. - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - # Mock google.auth.transport.grpc.SslCredentials class. 
+def test_device_manager_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() with mock.patch.multiple( "google.auth.transport.grpc.SslCredentials", __init__=mock.Mock(return_value=None), ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): - mock_cred = mock.Mock() - transport = transports.DeviceManagerGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint=api_mtls_endpoint, - client_cert_source=None, - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/cloudiot", - ), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - ) - assert transport.grpc_channel == mock_grpc_channel + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/cloudiot", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel -def test_registry_path(): +def test_device_path(): project = "squid" location = "clam" registry = "whelk" + device = "octopus" + + expected = "projects/{project}/locations/{location}/registries/{registry}/devices/{device}".format( + project=project, location=location, registry=registry, 
device=device, + ) + actual = DeviceManagerClient.device_path(project, location, registry, device) + assert expected == actual + + +def test_parse_device_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "registry": "cuttlefish", + "device": "mussel", + } + path = DeviceManagerClient.device_path(**expected) + + # Check that the path construction is reversible. + actual = DeviceManagerClient.parse_device_path(path) + assert expected == actual + + +def test_registry_path(): + project = "winkle" + location = "nautilus" + registry = "scallop" expected = "projects/{project}/locations/{location}/registries/{registry}".format( project=project, location=location, registry=registry, @@ -4993,9 +5554,9 @@ def test_registry_path(): def test_parse_registry_path(): expected = { - "project": "octopus", - "location": "oyster", - "registry": "nudibranch", + "project": "abalone", + "location": "squid", + "registry": "clam", } path = DeviceManagerClient.registry_path(**expected) @@ -5004,30 +5565,104 @@ def test_parse_registry_path(): assert expected == actual -def test_device_path(): - project = "squid" - location = "clam" - registry = "whelk" - device = "octopus" +def test_common_billing_account_path(): + billing_account = "whelk" - expected = "projects/{project}/locations/{location}/registries/{registry}/devices/{device}".format( - project=project, location=location, registry=registry, device=device, + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, ) - actual = DeviceManagerClient.device_path(project, location, registry, device) + actual = DeviceManagerClient.common_billing_account_path(billing_account) assert expected == actual -def test_parse_device_path(): +def test_parse_common_billing_account_path(): expected = { - "project": "oyster", - "location": "nudibranch", - "registry": "cuttlefish", - "device": "mussel", + "billing_account": "octopus", } - path = DeviceManagerClient.device_path(**expected) + path = 
DeviceManagerClient.common_billing_account_path(**expected) # Check that the path construction is reversible. - actual = DeviceManagerClient.parse_device_path(path) + actual = DeviceManagerClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "oyster" + + expected = "folders/{folder}".format(folder=folder,) + actual = DeviceManagerClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "nudibranch", + } + path = DeviceManagerClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DeviceManagerClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "cuttlefish" + + expected = "organizations/{organization}".format(organization=organization,) + actual = DeviceManagerClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "mussel", + } + path = DeviceManagerClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DeviceManagerClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "winkle" + + expected = "projects/{project}".format(project=project,) + actual = DeviceManagerClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nautilus", + } + path = DeviceManagerClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DeviceManagerClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "scallop" + location = "abalone" + + expected = "projects/{project}/locations/{location}".format( + project=project, location=location, + ) + actual = DeviceManagerClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "squid", + "location": "clam", + } + path = DeviceManagerClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DeviceManagerClient.parse_common_location_path(path) assert expected == actual