diff --git a/.github/header-checker-lint.yml b/.github/header-checker-lint.yml new file mode 100644 index 0000000..fc281c0 --- /dev/null +++ b/.github/header-checker-lint.yml @@ -0,0 +1,15 @@ +{"allowedCopyrightHolders": ["Google LLC"], + "allowedLicenses": ["Apache-2.0", "MIT", "BSD-3"], + "ignoreFiles": ["**/requirements.txt", "**/requirements-test.txt"], + "sourceFileExtensions": [ + "ts", + "js", + "java", + "sh", + "Dockerfile", + "yaml", + "py", + "html", + "txt" + ] +} \ No newline at end of file diff --git a/.gitignore b/.gitignore index b9daa52..b4243ce 100644 --- a/.gitignore +++ b/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/.kokoro/build.sh b/.kokoro/build.sh index a4d6015..a2916af 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -15,7 +15,11 @@ set -eo pipefail -cd github/python-os-config +if [[ -z "${PROJECT_ROOT:-}" ]]; then + PROJECT_ROOT="github/python-os-config" +fi + +cd "${PROJECT_ROOT}" # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -30,16 +34,26 @@ export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/service-account.json export PROJECT_ID=$(cat "${KOKORO_GFILE_DIR}/project-id.json") # Remove old nox -python3.6 -m pip uninstall --yes --quiet nox-automation +python3 -m pip uninstall --yes --quiet nox-automation # Install nox -python3.6 -m pip install --upgrade --quiet nox -python3.6 -m nox --version +python3 -m pip install --upgrade --quiet nox +python3 -m nox --version + +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. 
+if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then - python3.6 -m nox -s "${NOX_SESSION:-}" + python3 -m nox -s ${NOX_SESSION:-} else - python3.6 -m nox + python3 -m nox fi diff --git a/.kokoro/docs/docs-presubmit.cfg b/.kokoro/docs/docs-presubmit.cfg index 1118107..413095e 100644 --- a/.kokoro/docs/docs-presubmit.cfg +++ b/.kokoro/docs/docs-presubmit.cfg @@ -15,3 +15,14 @@ env_vars: { key: "TRAMPOLINE_IMAGE_UPLOAD" value: "false" } + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-os-config/.kokoro/build.sh" +} + +# Only run this nox session. +env_vars: { + key: "NOX_SESSION" + value: "docs docfx" +} diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-os-config/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-os-config/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 
--- /dev/null +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-os-config/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh new file mode 100755 index 0000000..765b761 --- /dev/null +++ b/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A customized test runner for samples. +# +# For periodic builds, you can specify this file for testing against head. + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders
shopt -s globstar + +cd github/python-os-config + +exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh new file mode 100755 index 0000000..cf5de74 --- /dev/null +++ b/.kokoro/test-samples-impl.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost command to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets accessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. 
+set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index cd3a49e..6364ca9 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# The default test runner for samples. +# +# For periodic builds, we rewind the repo to the latest release, and +# run test-samples-impl.sh. # `-e` enables the script to automatically fail when a command fails # `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero @@ -24,87 +28,19 @@ cd github/python-os-config # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + # preserving the test runner implementation. 
+ cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + echo "Now we rewind the repo back to the latest release..." LATEST_RELEASE=$(git describe --abbrev=0 --tags) git checkout $LATEST_RELEASE -fi - -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. `./samples` not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.6 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. 
- file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" +exec .kokoro/test-samples-impl.sh diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a9024b1..32302e4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,6 +12,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.4 + rev: 3.9.0 hooks: - id: flake8 diff --git a/.trampolinerc b/.trampolinerc index 995ee29..383b6ec 100644 --- a/.trampolinerc +++ b/.trampolinerc @@ -24,6 +24,7 @@ required_envvars+=( pass_down_envvars+=( "STAGING_BUCKET" "V2_STAGING_BUCKET" + "NOX_SESSION" ) # Prevent unintentional override on the default image. 
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index f1e0312..49adb42 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -70,9 +70,14 @@ We use `nox `__ to instrument our tests. - To test your changes, run unit tests with ``nox``:: $ nox -s unit-2.7 - $ nox -s unit-3.7 + $ nox -s unit-3.8 $ ... +- Args to pytest can be passed through the nox command separated by a `--`. For + example, to run a single test:: + + $ nox -s unit-3.8 -- -k + .. note:: The unit tests and system tests are described in the @@ -93,8 +98,12 @@ On Debian/Ubuntu:: ************ Coding Style ************ +- We use the automatic code formatter ``black``. You can run it using + the nox session ``blacken``. This will eliminate many lint errors. Run via:: + + $ nox -s blacken -- PEP8 compliance, with exceptions defined in the linter configuration. +- PEP8 compliance is required, with exceptions defined in the linter configuration. If you have ``nox`` installed, you can test that you have not introduced any non-compliant code via:: @@ -133,13 +142,18 @@ Running System Tests - To run system tests, you can execute:: - $ nox -s system-3.7 + # Run all system tests + $ nox -s system-3.8 $ nox -s system-2.7 + # Run a single system test + $ nox -s system-3.8 -- -k + + .. note:: System tests are only configured to run under Python 2.7 and - Python 3.7. For expediency, we do not run them in older versions + Python 3.8. For expediency, we do not run them in older versions of Python 3. This alone will not run the tests. You'll need to change some local diff --git a/LICENSE b/LICENSE index a8ee855..d645695 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,7 @@ - Apache License + + Apache License Version 2.0, January 2004 - https://www.apache.org/licenses/ + http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION @@ -192,7 +193,7 @@ you may not use this file except in compliance with the License. 
You may obtain a copy of the License at - https://www.apache.org/licenses/LICENSE-2.0 + http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, diff --git a/MANIFEST.in b/MANIFEST.in index e9e29d1..e783f4c 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -16,10 +16,10 @@ # Generated by synthtool. DO NOT EDIT! include README.rst LICENSE -recursive-include google *.json *.proto +recursive-include google *.json *.proto py.typed recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ # Exclude scripts for samples readmegen -prune scripts/readme-gen \ No newline at end of file +prune scripts/readme-gen diff --git a/docs/_static/custom.css b/docs/_static/custom.css index 0abaf22..bcd37bb 100644 --- a/docs/_static/custom.css +++ b/docs/_static/custom.css @@ -1,4 +1,9 @@ div#python2-eol { border-color: red; border-width: medium; -} \ No newline at end of file +} + +/* Ensure minimum width for 'Parameters' / 'Returns' column */ +dl.field-list > dt { + min-width: 100px +} diff --git a/docs/osconfig_v1/os_config_service.rst b/docs/osconfig_v1/os_config_service.rst new file mode 100644 index 0000000..cb2be75 --- /dev/null +++ b/docs/osconfig_v1/os_config_service.rst @@ -0,0 +1,11 @@ +OsConfigService +--------------------------------- + +.. automodule:: google.cloud.osconfig_v1.services.os_config_service + :members: + :inherited-members: + + +.. automodule:: google.cloud.osconfig_v1.services.os_config_service.pagers + :members: + :inherited-members: diff --git a/docs/osconfig_v1/services.rst b/docs/osconfig_v1/services.rst index dbdc04f..e0dad88 100644 --- a/docs/osconfig_v1/services.rst +++ b/docs/osconfig_v1/services.rst @@ -1,6 +1,6 @@ Services for Google Cloud Osconfig v1 API ========================================= +.. toctree:: + :maxdepth: 2 -.. 
automodule:: google.cloud.osconfig_v1.services.os_config_service - :members: - :inherited-members: + os_config_service diff --git a/docs/osconfig_v1/types.rst b/docs/osconfig_v1/types.rst index 2fdbc30..5c7249d 100644 --- a/docs/osconfig_v1/types.rst +++ b/docs/osconfig_v1/types.rst @@ -3,4 +3,5 @@ Types for Google Cloud Osconfig v1 API .. automodule:: google.cloud.osconfig_v1.types :members: + :undoc-members: :show-inheritance: diff --git a/google/cloud/osconfig_v1/services/os_config_service/async_client.py b/google/cloud/osconfig_v1/services/os_config_service/async_client.py index 0042cef..c38b706 100644 --- a/google/cloud/osconfig_v1/services/os_config_service/async_client.py +++ b/google/cloud/osconfig_v1/services/os_config_service/async_client.py @@ -89,8 +89,36 @@ class OsConfigServiceAsyncClient: OsConfigServiceClient.parse_common_location_path ) - from_service_account_info = OsConfigServiceClient.from_service_account_info - from_service_account_file = OsConfigServiceClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OsConfigServiceAsyncClient: The constructed client. + """ + return OsConfigServiceClient.from_service_account_info.__func__(OsConfigServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + OsConfigServiceAsyncClient: The constructed client. 
+ """ + return OsConfigServiceClient.from_service_account_file.__func__(OsConfigServiceAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -166,7 +194,7 @@ async def execute_patch_job( job. Args: - request (:class:`~.patch_jobs.ExecutePatchJobRequest`): + request (:class:`google.cloud.osconfig_v1.types.ExecutePatchJobRequest`): The request object. A request message to initiate patching across Compute Engine instances. @@ -177,17 +205,17 @@ async def execute_patch_job( sent along with the request as metadata. Returns: - ~.patch_jobs.PatchJob: - A high level representation of a patch job that is - either in progress or has completed. + google.cloud.osconfig_v1.types.PatchJob: + A high level representation of a patch job that is either in progress + or has completed. - Instance details are not included in the job. To - paginate through instance details, use - ListPatchJobInstanceDetails. + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. - For more information about patch jobs, see `Creating - patch - jobs `__. + For more information about patch jobs, see [Creating + patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/create-patch-job). """ # Create or coerce a protobuf request object. @@ -228,12 +256,13 @@ async def get_patch_job( of completed jobs. Args: - request (:class:`~.patch_jobs.GetPatchJobRequest`): + request (:class:`google.cloud.osconfig_v1.types.GetPatchJobRequest`): The request object. Request to get an active or completed patch job. name (:class:`str`): Required. Name of the patch in the form ``projects/*/patchJobs/*`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -245,17 +274,17 @@ async def get_patch_job( sent along with the request as metadata. 
Returns: - ~.patch_jobs.PatchJob: - A high level representation of a patch job that is - either in progress or has completed. + google.cloud.osconfig_v1.types.PatchJob: + A high level representation of a patch job that is either in progress + or has completed. - Instance details are not included in the job. To - paginate through instance details, use - ListPatchJobInstanceDetails. + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. - For more information about patch jobs, see `Creating - patch - jobs `__. + For more information about patch jobs, see [Creating + patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/create-patch-job). """ # Create or coerce a protobuf request object. @@ -308,7 +337,7 @@ async def cancel_patch_job( Canceled patch jobs cannot be restarted. Args: - request (:class:`~.patch_jobs.CancelPatchJobRequest`): + request (:class:`google.cloud.osconfig_v1.types.CancelPatchJobRequest`): The request object. Message for canceling a patch job. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -318,17 +347,17 @@ async def cancel_patch_job( sent along with the request as metadata. Returns: - ~.patch_jobs.PatchJob: - A high level representation of a patch job that is - either in progress or has completed. + google.cloud.osconfig_v1.types.PatchJob: + A high level representation of a patch job that is either in progress + or has completed. - Instance details are not included in the job. To - paginate through instance details, use - ListPatchJobInstanceDetails. + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. - For more information about patch jobs, see `Creating - patch - jobs `__. + For more information about patch jobs, see [Creating + patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/create-patch-job). 
""" # Create or coerce a protobuf request object. @@ -367,7 +396,7 @@ async def list_patch_jobs( r"""Get a list of patch jobs. Args: - request (:class:`~.patch_jobs.ListPatchJobsRequest`): + request (:class:`google.cloud.osconfig_v1.types.ListPatchJobsRequest`): The request object. A request message for listing patch jobs. parent (:class:`str`): @@ -383,7 +412,7 @@ async def list_patch_jobs( sent along with the request as metadata. Returns: - ~.pagers.ListPatchJobsAsyncPager: + google.cloud.osconfig_v1.services.os_config_service.pagers.ListPatchJobsAsyncPager: A response message for listing patch jobs. Iterating over this object will yield @@ -447,12 +476,13 @@ async def list_patch_job_instance_details( r"""Get a list of instance details for a given patch job. Args: - request (:class:`~.patch_jobs.ListPatchJobInstanceDetailsRequest`): + request (:class:`google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsRequest`): The request object. Request to list details for all instances that are part of a patch job. parent (:class:`str`): Required. The parent for the instances are in the form of ``projects/*/patchJobs/*``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -464,7 +494,7 @@ async def list_patch_job_instance_details( sent along with the request as metadata. Returns: - ~.pagers.ListPatchJobInstanceDetailsAsyncPager: + google.cloud.osconfig_v1.services.os_config_service.pagers.ListPatchJobInstanceDetailsAsyncPager: A response message for listing the instances details for a patch job. Iterating over this object will yield @@ -530,18 +560,20 @@ async def create_patch_deployment( r"""Create an OS Config patch deployment. Args: - request (:class:`~.patch_deployments.CreatePatchDeploymentRequest`): + request (:class:`google.cloud.osconfig_v1.types.CreatePatchDeploymentRequest`): The request object. A request message for creating a patch deployment. parent (:class:`str`): Required. 
The project to apply this patch deployment to in the form ``projects/*``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - patch_deployment (:class:`~.patch_deployments.PatchDeployment`): + patch_deployment (:class:`google.cloud.osconfig_v1.types.PatchDeployment`): Required. The patch deployment to create. + This corresponds to the ``patch_deployment`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -556,6 +588,7 @@ async def create_patch_deployment( - Must end with a number or a letter. - Must be unique within the project. + This corresponds to the ``patch_deployment_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -567,13 +600,13 @@ async def create_patch_deployment( sent along with the request as metadata. Returns: - ~.patch_deployments.PatchDeployment: - Patch deployments are configurations that individual - patch jobs use to complete a patch. These configurations - include instance filter, package repository settings, - and a schedule. For more information about creating and - managing patch deployments, see `Scheduling patch - jobs `__. + google.cloud.osconfig_v1.types.PatchDeployment: + Patch deployments are configurations that individual patch jobs use to + complete a patch. These configurations include + instance filter, package repository settings, and a + schedule. For more information about creating and + managing patch deployments, see [Scheduling patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs). """ # Create or coerce a protobuf request object. @@ -630,12 +663,13 @@ async def get_patch_deployment( r"""Get an OS Config patch deployment. Args: - request (:class:`~.patch_deployments.GetPatchDeploymentRequest`): + request (:class:`google.cloud.osconfig_v1.types.GetPatchDeploymentRequest`): The request object. 
A request message for retrieving a patch deployment. name (:class:`str`): Required. The resource name of the patch deployment in the form ``projects/*/patchDeployments/*``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -647,13 +681,13 @@ async def get_patch_deployment( sent along with the request as metadata. Returns: - ~.patch_deployments.PatchDeployment: - Patch deployments are configurations that individual - patch jobs use to complete a patch. These configurations - include instance filter, package repository settings, - and a schedule. For more information about creating and - managing patch deployments, see `Scheduling patch - jobs `__. + google.cloud.osconfig_v1.types.PatchDeployment: + Patch deployments are configurations that individual patch jobs use to + complete a patch. These configurations include + instance filter, package repository settings, and a + schedule. For more information about creating and + managing patch deployments, see [Scheduling patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs). """ # Create or coerce a protobuf request object. @@ -706,12 +740,13 @@ async def list_patch_deployments( r"""Get a page of OS Config patch deployments. Args: - request (:class:`~.patch_deployments.ListPatchDeploymentsRequest`): + request (:class:`google.cloud.osconfig_v1.types.ListPatchDeploymentsRequest`): The request object. A request message for listing patch deployments. parent (:class:`str`): Required. The resource name of the parent in the form ``projects/*``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -723,7 +758,7 @@ async def list_patch_deployments( sent along with the request as metadata. 
Returns: - ~.pagers.ListPatchDeploymentsAsyncPager: + google.cloud.osconfig_v1.services.os_config_service.pagers.ListPatchDeploymentsAsyncPager: A response message for listing patch deployments. Iterating over this object will yield @@ -787,12 +822,13 @@ async def delete_patch_deployment( r"""Delete an OS Config patch deployment. Args: - request (:class:`~.patch_deployments.DeletePatchDeploymentRequest`): + request (:class:`google.cloud.osconfig_v1.types.DeletePatchDeploymentRequest`): The request object. A request message for deleting a patch deployment. name (:class:`str`): Required. The resource name of the patch deployment in the form ``projects/*/patchDeployments/*``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/osconfig_v1/services/os_config_service/client.py b/google/cloud/osconfig_v1/services/os_config_service/client.py index f1101be..8e6ea72 100644 --- a/google/cloud/osconfig_v1/services/os_config_service/client.py +++ b/google/cloud/osconfig_v1/services/os_config_service/client.py @@ -127,7 +127,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + OsConfigServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_info(info) kwargs["credentials"] = credentials @@ -145,7 +145,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + OsConfigServiceClient: The constructed client. 
""" credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -282,10 +282,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.OsConfigServiceTransport]): The + transport (Union[str, OsConfigServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT @@ -321,21 +321,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. 
if client_options.api_endpoint is not None: @@ -378,7 +374,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -395,7 +391,7 @@ def execute_patch_job( job. Args: - request (:class:`~.patch_jobs.ExecutePatchJobRequest`): + request (google.cloud.osconfig_v1.types.ExecutePatchJobRequest): The request object. A request message to initiate patching across Compute Engine instances. @@ -406,17 +402,17 @@ def execute_patch_job( sent along with the request as metadata. Returns: - ~.patch_jobs.PatchJob: - A high level representation of a patch job that is - either in progress or has completed. + google.cloud.osconfig_v1.types.PatchJob: + A high level representation of a patch job that is either in progress + or has completed. - Instance details are not included in the job. To - paginate through instance details, use - ListPatchJobInstanceDetails. + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. - For more information about patch jobs, see `Creating - patch - jobs `__. + For more information about patch jobs, see [Creating + patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/create-patch-job). """ # Create or coerce a protobuf request object. @@ -458,12 +454,13 @@ def get_patch_job( of completed jobs. Args: - request (:class:`~.patch_jobs.GetPatchJobRequest`): + request (google.cloud.osconfig_v1.types.GetPatchJobRequest): The request object. Request to get an active or completed patch job. - name (:class:`str`): + name (str): Required. Name of the patch in the form ``projects/*/patchJobs/*`` + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
@@ -475,17 +472,17 @@ def get_patch_job( sent along with the request as metadata. Returns: - ~.patch_jobs.PatchJob: - A high level representation of a patch job that is - either in progress or has completed. + google.cloud.osconfig_v1.types.PatchJob: + A high level representation of a patch job that is either in progress + or has completed. - Instance details are not included in the job. To - paginate through instance details, use - ListPatchJobInstanceDetails. + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. - For more information about patch jobs, see `Creating - patch - jobs `__. + For more information about patch jobs, see [Creating + patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/create-patch-job). """ # Create or coerce a protobuf request object. @@ -539,7 +536,7 @@ def cancel_patch_job( Canceled patch jobs cannot be restarted. Args: - request (:class:`~.patch_jobs.CancelPatchJobRequest`): + request (google.cloud.osconfig_v1.types.CancelPatchJobRequest): The request object. Message for canceling a patch job. retry (google.api_core.retry.Retry): Designation of what errors, if any, @@ -549,17 +546,17 @@ def cancel_patch_job( sent along with the request as metadata. Returns: - ~.patch_jobs.PatchJob: - A high level representation of a patch job that is - either in progress or has completed. + google.cloud.osconfig_v1.types.PatchJob: + A high level representation of a patch job that is either in progress + or has completed. - Instance details are not included in the job. To - paginate through instance details, use - ListPatchJobInstanceDetails. + Instance details are not included in the job. To + paginate through instance details, use + ListPatchJobInstanceDetails. - For more information about patch jobs, see `Creating - patch - jobs `__. 
+ For more information about patch jobs, see [Creating + patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/create-patch-job). """ # Create or coerce a protobuf request object. @@ -599,10 +596,10 @@ def list_patch_jobs( r"""Get a list of patch jobs. Args: - request (:class:`~.patch_jobs.ListPatchJobsRequest`): + request (google.cloud.osconfig_v1.types.ListPatchJobsRequest): The request object. A request message for listing patch jobs. - parent (:class:`str`): + parent (str): Required. In the form of ``projects/*`` This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this @@ -615,7 +612,7 @@ def list_patch_jobs( sent along with the request as metadata. Returns: - ~.pagers.ListPatchJobsPager: + google.cloud.osconfig_v1.services.os_config_service.pagers.ListPatchJobsPager: A response message for listing patch jobs. Iterating over this object will yield @@ -680,12 +677,13 @@ def list_patch_job_instance_details( r"""Get a list of instance details for a given patch job. Args: - request (:class:`~.patch_jobs.ListPatchJobInstanceDetailsRequest`): + request (google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsRequest): The request object. Request to list details for all instances that are part of a patch job. - parent (:class:`str`): + parent (str): Required. The parent for the instances are in the form of ``projects/*/patchJobs/*``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -697,7 +695,7 @@ def list_patch_job_instance_details( sent along with the request as metadata. Returns: - ~.pagers.ListPatchJobInstanceDetailsPager: + google.cloud.osconfig_v1.services.os_config_service.pagers.ListPatchJobInstanceDetailsPager: A response message for listing the instances details for a patch job. Iterating over this object will yield @@ -766,22 +764,24 @@ def create_patch_deployment( r"""Create an OS Config patch deployment. 
Args: - request (:class:`~.patch_deployments.CreatePatchDeploymentRequest`): + request (google.cloud.osconfig_v1.types.CreatePatchDeploymentRequest): The request object. A request message for creating a patch deployment. - parent (:class:`str`): + parent (str): Required. The project to apply this patch deployment to in the form ``projects/*``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - patch_deployment (:class:`~.patch_deployments.PatchDeployment`): + patch_deployment (google.cloud.osconfig_v1.types.PatchDeployment): Required. The patch deployment to create. + This corresponds to the ``patch_deployment`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - patch_deployment_id (:class:`str`): + patch_deployment_id (str): Required. A name for the patch deployment in the project. When creating a name the following rules apply: @@ -792,6 +792,7 @@ def create_patch_deployment( - Must end with a number or a letter. - Must be unique within the project. + This corresponds to the ``patch_deployment_id`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -803,13 +804,13 @@ def create_patch_deployment( sent along with the request as metadata. Returns: - ~.patch_deployments.PatchDeployment: - Patch deployments are configurations that individual - patch jobs use to complete a patch. These configurations - include instance filter, package repository settings, - and a schedule. For more information about creating and - managing patch deployments, see `Scheduling patch - jobs `__. + google.cloud.osconfig_v1.types.PatchDeployment: + Patch deployments are configurations that individual patch jobs use to + complete a patch. These configurations include + instance filter, package repository settings, and a + schedule. 
For more information about creating and + managing patch deployments, see [Scheduling patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs). """ # Create or coerce a protobuf request object. @@ -867,12 +868,13 @@ def get_patch_deployment( r"""Get an OS Config patch deployment. Args: - request (:class:`~.patch_deployments.GetPatchDeploymentRequest`): + request (google.cloud.osconfig_v1.types.GetPatchDeploymentRequest): The request object. A request message for retrieving a patch deployment. - name (:class:`str`): + name (str): Required. The resource name of the patch deployment in the form ``projects/*/patchDeployments/*``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -884,13 +886,13 @@ def get_patch_deployment( sent along with the request as metadata. Returns: - ~.patch_deployments.PatchDeployment: - Patch deployments are configurations that individual - patch jobs use to complete a patch. These configurations - include instance filter, package repository settings, - and a schedule. For more information about creating and - managing patch deployments, see `Scheduling patch - jobs `__. + google.cloud.osconfig_v1.types.PatchDeployment: + Patch deployments are configurations that individual patch jobs use to + complete a patch. These configurations include + instance filter, package repository settings, and a + schedule. For more information about creating and + managing patch deployments, see [Scheduling patch + jobs](\ https://cloud.google.com/compute/docs/os-patch-management/schedule-patch-jobs). """ # Create or coerce a protobuf request object. @@ -944,12 +946,13 @@ def list_patch_deployments( r"""Get a page of OS Config patch deployments. Args: - request (:class:`~.patch_deployments.ListPatchDeploymentsRequest`): + request (google.cloud.osconfig_v1.types.ListPatchDeploymentsRequest): The request object. 
A request message for listing patch deployments. - parent (:class:`str`): + parent (str): Required. The resource name of the parent in the form ``projects/*``. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -961,7 +964,7 @@ def list_patch_deployments( sent along with the request as metadata. Returns: - ~.pagers.ListPatchDeploymentsPager: + google.cloud.osconfig_v1.services.os_config_service.pagers.ListPatchDeploymentsPager: A response message for listing patch deployments. Iterating over this object will yield @@ -1026,12 +1029,13 @@ def delete_patch_deployment( r"""Delete an OS Config patch deployment. Args: - request (:class:`~.patch_deployments.DeletePatchDeploymentRequest`): + request (google.cloud.osconfig_v1.types.DeletePatchDeploymentRequest): The request object. A request message for deleting a patch deployment. - name (:class:`str`): + name (str): Required. The resource name of the patch deployment in the form ``projects/*/patchDeployments/*``. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/google/cloud/osconfig_v1/services/os_config_service/pagers.py b/google/cloud/osconfig_v1/services/os_config_service/pagers.py index f9593f5..9a57712 100644 --- a/google/cloud/osconfig_v1/services/os_config_service/pagers.py +++ b/google/cloud/osconfig_v1/services/os_config_service/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. # -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.osconfig_v1.types import patch_deployments from google.cloud.osconfig_v1.types import patch_jobs @@ -25,7 +34,7 @@ class ListPatchJobsPager: """A pager for iterating through ``list_patch_jobs`` requests. 
This class thinly wraps an initial - :class:`~.patch_jobs.ListPatchJobsResponse` object, and + :class:`google.cloud.osconfig_v1.types.ListPatchJobsResponse` object, and provides an ``__iter__`` method to iterate through its ``patch_jobs`` field. @@ -34,7 +43,7 @@ class ListPatchJobsPager: through the ``patch_jobs`` field on the corresponding responses. - All the usual :class:`~.patch_jobs.ListPatchJobsResponse` + All the usual :class:`google.cloud.osconfig_v1.types.ListPatchJobsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -52,9 +61,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.patch_jobs.ListPatchJobsRequest`): + request (google.cloud.osconfig_v1.types.ListPatchJobsRequest): The initial request object. - response (:class:`~.patch_jobs.ListPatchJobsResponse`): + response (google.cloud.osconfig_v1.types.ListPatchJobsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -87,7 +96,7 @@ class ListPatchJobsAsyncPager: """A pager for iterating through ``list_patch_jobs`` requests. This class thinly wraps an initial - :class:`~.patch_jobs.ListPatchJobsResponse` object, and + :class:`google.cloud.osconfig_v1.types.ListPatchJobsResponse` object, and provides an ``__aiter__`` method to iterate through its ``patch_jobs`` field. @@ -96,7 +105,7 @@ class ListPatchJobsAsyncPager: through the ``patch_jobs`` field on the corresponding responses. - All the usual :class:`~.patch_jobs.ListPatchJobsResponse` + All the usual :class:`google.cloud.osconfig_v1.types.ListPatchJobsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. 
""" @@ -114,9 +123,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.patch_jobs.ListPatchJobsRequest`): + request (google.cloud.osconfig_v1.types.ListPatchJobsRequest): The initial request object. - response (:class:`~.patch_jobs.ListPatchJobsResponse`): + response (google.cloud.osconfig_v1.types.ListPatchJobsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -153,7 +162,7 @@ class ListPatchJobInstanceDetailsPager: """A pager for iterating through ``list_patch_job_instance_details`` requests. This class thinly wraps an initial - :class:`~.patch_jobs.ListPatchJobInstanceDetailsResponse` object, and + :class:`google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsResponse` object, and provides an ``__iter__`` method to iterate through its ``patch_job_instance_details`` field. @@ -162,7 +171,7 @@ class ListPatchJobInstanceDetailsPager: through the ``patch_job_instance_details`` field on the corresponding responses. - All the usual :class:`~.patch_jobs.ListPatchJobInstanceDetailsResponse` + All the usual :class:`google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -180,9 +189,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.patch_jobs.ListPatchJobInstanceDetailsRequest`): + request (google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsRequest): The initial request object. - response (:class:`~.patch_jobs.ListPatchJobInstanceDetailsResponse`): + response (google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsResponse): The initial response object. 
metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -215,7 +224,7 @@ class ListPatchJobInstanceDetailsAsyncPager: """A pager for iterating through ``list_patch_job_instance_details`` requests. This class thinly wraps an initial - :class:`~.patch_jobs.ListPatchJobInstanceDetailsResponse` object, and + :class:`google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsResponse` object, and provides an ``__aiter__`` method to iterate through its ``patch_job_instance_details`` field. @@ -224,7 +233,7 @@ class ListPatchJobInstanceDetailsAsyncPager: through the ``patch_job_instance_details`` field on the corresponding responses. - All the usual :class:`~.patch_jobs.ListPatchJobInstanceDetailsResponse` + All the usual :class:`google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -244,9 +253,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.patch_jobs.ListPatchJobInstanceDetailsRequest`): + request (google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsRequest): The initial request object. - response (:class:`~.patch_jobs.ListPatchJobInstanceDetailsResponse`): + response (google.cloud.osconfig_v1.types.ListPatchJobInstanceDetailsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -285,7 +294,7 @@ class ListPatchDeploymentsPager: """A pager for iterating through ``list_patch_deployments`` requests. This class thinly wraps an initial - :class:`~.patch_deployments.ListPatchDeploymentsResponse` object, and + :class:`google.cloud.osconfig_v1.types.ListPatchDeploymentsResponse` object, and provides an ``__iter__`` method to iterate through its ``patch_deployments`` field. 
@@ -294,7 +303,7 @@ class ListPatchDeploymentsPager: through the ``patch_deployments`` field on the corresponding responses. - All the usual :class:`~.patch_deployments.ListPatchDeploymentsResponse` + All the usual :class:`google.cloud.osconfig_v1.types.ListPatchDeploymentsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -312,9 +321,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.patch_deployments.ListPatchDeploymentsRequest`): + request (google.cloud.osconfig_v1.types.ListPatchDeploymentsRequest): The initial request object. - response (:class:`~.patch_deployments.ListPatchDeploymentsResponse`): + response (google.cloud.osconfig_v1.types.ListPatchDeploymentsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -347,7 +356,7 @@ class ListPatchDeploymentsAsyncPager: """A pager for iterating through ``list_patch_deployments`` requests. This class thinly wraps an initial - :class:`~.patch_deployments.ListPatchDeploymentsResponse` object, and + :class:`google.cloud.osconfig_v1.types.ListPatchDeploymentsResponse` object, and provides an ``__aiter__`` method to iterate through its ``patch_deployments`` field. @@ -356,7 +365,7 @@ class ListPatchDeploymentsAsyncPager: through the ``patch_deployments`` field on the corresponding responses. - All the usual :class:`~.patch_deployments.ListPatchDeploymentsResponse` + All the usual :class:`google.cloud.osconfig_v1.types.ListPatchDeploymentsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -376,9 +385,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. 
- request (:class:`~.patch_deployments.ListPatchDeploymentsRequest`): + request (google.cloud.osconfig_v1.types.ListPatchDeploymentsRequest): The initial request object. - response (:class:`~.patch_deployments.ListPatchDeploymentsResponse`): + response (google.cloud.osconfig_v1.types.ListPatchDeploymentsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/google/cloud/osconfig_v1/services/os_config_service/transports/base.py b/google/cloud/osconfig_v1/services/os_config_service/transports/base.py index 5ef50cc..165135e 100644 --- a/google/cloud/osconfig_v1/services/os_config_service/transports/base.py +++ b/google/cloud/osconfig_v1/services/os_config_service/transports/base.py @@ -69,10 +69,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -80,6 +80,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. 
if credentials and credentials_file: @@ -89,20 +92,17 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { diff --git a/google/cloud/osconfig_v1/services/os_config_service/transports/grpc.py b/google/cloud/osconfig_v1/services/os_config_service/transports/grpc.py index 45057c7..1302a15 100644 --- a/google/cloud/osconfig_v1/services/os_config_service/transports/grpc.py +++ b/google/cloud/osconfig_v1/services/os_config_service/transports/grpc.py @@ -62,6 +62,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -92,6 +93,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. 
quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -106,72 +111,60 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. 
The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -179,17 +172,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. 
This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -203,7 +187,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/osconfig_v1/services/os_config_service/transports/grpc_asyncio.py b/google/cloud/osconfig_v1/services/os_config_service/transports/grpc_asyncio.py index 54903c7..f579fb1 100644 --- a/google/cloud/osconfig_v1/services/os_config_service/transports/grpc_asyncio.py +++ b/google/cloud/osconfig_v1/services/os_config_service/transports/grpc_asyncio.py @@ -66,7 +66,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -106,6 +106,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -137,12 +138,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. 
It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -151,72 +156,60 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials - - # create a new channel. The provided one is ignored. - self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. 
+ if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -224,17 +217,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/osconfig_v1/types/__init__.py b/google/cloud/osconfig_v1/types/__init__.py index 957e7b3..7020841 100644 --- a/google/cloud/osconfig_v1/types/__init__.py +++ b/google/cloud/osconfig_v1/types/__init__.py @@ -15,78 +15,78 @@ # limitations under the License. 
# +from .inventory import Inventory from .osconfig_common import FixedOrPercent +from .patch_deployments import ( + CreatePatchDeploymentRequest, + DeletePatchDeploymentRequest, + GetPatchDeploymentRequest, + ListPatchDeploymentsRequest, + ListPatchDeploymentsResponse, + MonthlySchedule, + OneTimeSchedule, + PatchDeployment, + RecurringSchedule, + WeekDayOfMonth, + WeeklySchedule, +) from .patch_jobs import ( + AptSettings, + CancelPatchJobRequest, + ExecStep, + ExecStepConfig, ExecutePatchJobRequest, + GcsObject, GetPatchJobRequest, + GooSettings, + Instance, ListPatchJobInstanceDetailsRequest, ListPatchJobInstanceDetailsResponse, - PatchJobInstanceDetails, ListPatchJobsRequest, ListPatchJobsResponse, - PatchJob, PatchConfig, - Instance, - CancelPatchJobRequest, - AptSettings, - YumSettings, - GooSettings, - ZypperSettings, - WindowsUpdateSettings, - ExecStep, - ExecStepConfig, - GcsObject, PatchInstanceFilter, + PatchJob, + PatchJobInstanceDetails, PatchRollout, + WindowsUpdateSettings, + YumSettings, + ZypperSettings, ) -from .patch_deployments import ( - PatchDeployment, - OneTimeSchedule, - RecurringSchedule, - WeeklySchedule, - MonthlySchedule, - WeekDayOfMonth, - CreatePatchDeploymentRequest, - GetPatchDeploymentRequest, - ListPatchDeploymentsRequest, - ListPatchDeploymentsResponse, - DeletePatchDeploymentRequest, -) -from .inventory import Inventory __all__ = ( + "Inventory", "FixedOrPercent", + "CreatePatchDeploymentRequest", + "DeletePatchDeploymentRequest", + "GetPatchDeploymentRequest", + "ListPatchDeploymentsRequest", + "ListPatchDeploymentsResponse", + "MonthlySchedule", + "OneTimeSchedule", + "PatchDeployment", + "RecurringSchedule", + "WeekDayOfMonth", + "WeeklySchedule", + "AptSettings", + "CancelPatchJobRequest", + "ExecStep", + "ExecStepConfig", "ExecutePatchJobRequest", + "GcsObject", "GetPatchJobRequest", + "GooSettings", + "Instance", "ListPatchJobInstanceDetailsRequest", "ListPatchJobInstanceDetailsResponse", - "PatchJobInstanceDetails", 
"ListPatchJobsRequest", "ListPatchJobsResponse", - "PatchJob", "PatchConfig", - "Instance", - "CancelPatchJobRequest", - "AptSettings", - "YumSettings", - "GooSettings", - "ZypperSettings", - "WindowsUpdateSettings", - "ExecStep", - "ExecStepConfig", - "GcsObject", "PatchInstanceFilter", + "PatchJob", + "PatchJobInstanceDetails", "PatchRollout", - "PatchDeployment", - "OneTimeSchedule", - "RecurringSchedule", - "WeeklySchedule", - "MonthlySchedule", - "WeekDayOfMonth", - "CreatePatchDeploymentRequest", - "GetPatchDeploymentRequest", - "ListPatchDeploymentsRequest", - "ListPatchDeploymentsResponse", - "DeletePatchDeploymentRequest", - "Inventory", + "WindowsUpdateSettings", + "YumSettings", + "ZypperSettings", ) diff --git a/google/cloud/osconfig_v1/types/inventory.py b/google/cloud/osconfig_v1/types/inventory.py index bb25196..68d1a75 100644 --- a/google/cloud/osconfig_v1/types/inventory.py +++ b/google/cloud/osconfig_v1/types/inventory.py @@ -30,10 +30,10 @@ class Inventory(proto.Message): r"""The inventory details of a VM. Attributes: - os_info (~.inventory.Inventory.OsInfo): + os_info (google.cloud.osconfig_v1.types.Inventory.OsInfo): Base level operating system information for the VM. - items (Sequence[~.inventory.Inventory.ItemsEntry]): + items (Sequence[google.cloud.osconfig_v1.types.Inventory.ItemsEntry]): Inventory items related to the VM keyed by an opaque unique identifier for each inventory item. The identifier is unique to each distinct @@ -91,18 +91,18 @@ class Item(proto.Message): id (str): Identifier for this item, unique across items for this VM. - origin_type (~.inventory.Inventory.Item.OriginType): + origin_type (google.cloud.osconfig_v1.types.Inventory.Item.OriginType): The origin of this inventory item. - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): When this inventory item was first detected. 
- update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): When this inventory item was last modified. - type_ (~.inventory.Inventory.Item.Type): + type_ (google.cloud.osconfig_v1.types.Inventory.Item.Type): The specific type of inventory, correlating to its specific details. - installed_package (~.inventory.Inventory.SoftwarePackage): + installed_package (google.cloud.osconfig_v1.types.Inventory.SoftwarePackage): Software package present on the VM instance. - available_package (~.inventory.Inventory.SoftwarePackage): + available_package (google.cloud.osconfig_v1.types.Inventory.SoftwarePackage): Software package available to be installed on the VM instance. """ @@ -148,37 +148,37 @@ class SoftwarePackage(proto.Message): r"""Software package information of the operating system. Attributes: - yum_package (~.inventory.Inventory.VersionedPackage): + yum_package (google.cloud.osconfig_v1.types.Inventory.VersionedPackage): Yum package info. For details about the yum package manager, see https://access.redhat.com/documentation/en-us/red_hat_enterprise_linux/6/html/deployment_guide/ch-yum. - apt_package (~.inventory.Inventory.VersionedPackage): + apt_package (google.cloud.osconfig_v1.types.Inventory.VersionedPackage): Details of an APT package. For details about the apt package manager, see https://wiki.debian.org/Apt. - zypper_package (~.inventory.Inventory.VersionedPackage): + zypper_package (google.cloud.osconfig_v1.types.Inventory.VersionedPackage): Details of a Zypper package. For details about the Zypper package manager, see https://en.opensuse.org/SDB:Zypper_manual. - googet_package (~.inventory.Inventory.VersionedPackage): + googet_package (google.cloud.osconfig_v1.types.Inventory.VersionedPackage): Details of a Googet package. For details about the googet package manager, see https://github.com/google/googet. 
- zypper_patch (~.inventory.Inventory.ZypperPatch): + zypper_patch (google.cloud.osconfig_v1.types.Inventory.ZypperPatch): Details of a Zypper patch. For details about the Zypper package manager, see https://en.opensuse.org/SDB:Zypper_manual. - wua_package (~.inventory.Inventory.WindowsUpdatePackage): + wua_package (google.cloud.osconfig_v1.types.Inventory.WindowsUpdatePackage): Details of a Windows Update package. See https://docs.microsoft.com/en-us/windows/win32/api/_wua/ for information about Windows Update. - qfe_package (~.inventory.Inventory.WindowsQuickFixEngineeringPackage): + qfe_package (google.cloud.osconfig_v1.types.Inventory.WindowsQuickFixEngineeringPackage): Details of a Windows Quick Fix engineering package. See https://docs.microsoft.com/en- us/windows/win32/cimwin32prov/win32-quickfixengineering for info in Windows Quick Fix Engineering. - cos_package (~.inventory.Inventory.VersionedPackage): + cos_package (google.cloud.osconfig_v1.types.Inventory.VersionedPackage): Details of a COS package. """ @@ -269,7 +269,7 @@ class WindowsUpdatePackage(proto.Message): description (str): The localized description of the update package. - categories (Sequence[~.inventory.Inventory.WindowsUpdatePackage.WindowsUpdateCategory]): + categories (Sequence[google.cloud.osconfig_v1.types.Inventory.WindowsUpdatePackage.WindowsUpdateCategory]): The categories that are associated with this update package. kb_article_ids (Sequence[str]): @@ -287,7 +287,7 @@ class WindowsUpdatePackage(proto.Message): Stays the same across revisions. revision_number (int): The revision number of this update package. - last_deployment_change_time (~.timestamp.Timestamp): + last_deployment_change_time (google.protobuf.timestamp_pb2.Timestamp): The last published date of the update, in (UTC) date and time. """ @@ -370,7 +370,7 @@ class WindowsQuickFixEngineeringPackage(proto.Message): hot_fix_id (str): Unique identifier associated with a particular QFE update. 
- install_time (~.timestamp.Timestamp): + install_time (google.protobuf.timestamp_pb2.Timestamp): Date that the QFE update was installed. Mapped from installed_on field. """ diff --git a/google/cloud/osconfig_v1/types/patch_deployments.py b/google/cloud/osconfig_v1/types/patch_deployments.py index 27982ee..4c36884 100644 --- a/google/cloud/osconfig_v1/types/patch_deployments.py +++ b/google/cloud/osconfig_v1/types/patch_deployments.py @@ -62,34 +62,34 @@ class PatchDeployment(proto.Message): Optional. Description of the patch deployment. Length of the description is limited to 1024 characters. - instance_filter (~.patch_jobs.PatchInstanceFilter): + instance_filter (google.cloud.osconfig_v1.types.PatchInstanceFilter): Required. VM instances to patch. - patch_config (~.patch_jobs.PatchConfig): + patch_config (google.cloud.osconfig_v1.types.PatchConfig): Optional. Patch configuration that is applied. - duration (~.gp_duration.Duration): + duration (google.protobuf.duration_pb2.Duration): Optional. Duration of the patch. After the duration ends, the patch times out. - one_time_schedule (~.gco_patch_deployments.OneTimeSchedule): + one_time_schedule (google.cloud.osconfig_v1.types.OneTimeSchedule): Required. Schedule a one-time execution. - recurring_schedule (~.gco_patch_deployments.RecurringSchedule): + recurring_schedule (google.cloud.osconfig_v1.types.RecurringSchedule): Required. Schedule recurring executions. - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time the patch deployment was created. Timestamp is in `RFC3339 `__ text format. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time the patch deployment was last updated. Timestamp is in `RFC3339 `__ text format. - last_execute_time (~.timestamp.Timestamp): + last_execute_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The last time a patch job was started by this deployment. 
Timestamp is in `RFC3339 `__ text format. - rollout (~.patch_jobs.PatchRollout): + rollout (google.cloud.osconfig_v1.types.PatchRollout): Optional. Rollout strategy of the patch job. """ @@ -129,7 +129,7 @@ class OneTimeSchedule(proto.Message): `RFC3339 `__ text format. Attributes: - execute_time (~.timestamp.Timestamp): + execute_time (google.protobuf.timestamp_pb2.Timestamp): Required. The desired patch job execution time. """ @@ -141,31 +141,31 @@ class RecurringSchedule(proto.Message): r"""Sets the time for recurring patch deployments. Attributes: - time_zone (~.datetime.TimeZone): + time_zone (google.type.datetime_pb2.TimeZone): Required. Defines the time zone that ``time_of_day`` is relative to. The rules for daylight saving time are determined by the chosen time zone. - start_time (~.timestamp.Timestamp): + start_time (google.protobuf.timestamp_pb2.Timestamp): Optional. The time that the recurring schedule becomes effective. Defaults to ``create_time`` of the patch deployment. - end_time (~.timestamp.Timestamp): + end_time (google.protobuf.timestamp_pb2.Timestamp): Optional. The end time at which a recurring patch deployment schedule is no longer active. - time_of_day (~.timeofday.TimeOfDay): + time_of_day (google.type.timeofday_pb2.TimeOfDay): Required. Time of the day to run a recurring deployment. - frequency (~.gco_patch_deployments.RecurringSchedule.Frequency): + frequency (google.cloud.osconfig_v1.types.RecurringSchedule.Frequency): Required. The frequency unit of this recurring schedule. - weekly (~.gco_patch_deployments.WeeklySchedule): + weekly (google.cloud.osconfig_v1.types.WeeklySchedule): Required. Schedule with weekly executions. - monthly (~.gco_patch_deployments.MonthlySchedule): + monthly (google.cloud.osconfig_v1.types.MonthlySchedule): Required. Schedule with monthly executions. - last_execute_time (~.timestamp.Timestamp): + last_execute_time (google.protobuf.timestamp_pb2.Timestamp): Output only. 
The time the last patch job ran successfully. - next_execute_time (~.timestamp.Timestamp): + next_execute_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the next patch job is scheduled to run. """ @@ -207,7 +207,7 @@ class WeeklySchedule(proto.Message): r"""Represents a weekly schedule. Attributes: - day_of_week (~.dayofweek.DayOfWeek): + day_of_week (google.type.dayofweek_pb2.DayOfWeek): Required. Day of the week. """ @@ -220,7 +220,7 @@ class MonthlySchedule(proto.Message): of the month". Attributes: - week_day_of_month (~.gco_patch_deployments.WeekDayOfMonth): + week_day_of_month (google.cloud.osconfig_v1.types.WeekDayOfMonth): Required. Week day in a month. month_day (int): Required. One day of the month. 1-31 @@ -247,7 +247,7 @@ class WeekDayOfMonth(proto.Message): Required. Week number in a month. 1-4 indicates the 1st to 4th week of the month. -1 indicates the last week of the month. - day_of_week (~.dayofweek.DayOfWeek): + day_of_week (google.type.dayofweek_pb2.DayOfWeek): Required. A day of the week. """ @@ -273,7 +273,7 @@ class CreatePatchDeploymentRequest(proto.Message): - Must be between 1-63 characters. - Must end with a number or a letter. - Must be unique within the project. - patch_deployment (~.gco_patch_deployments.PatchDeployment): + patch_deployment (google.cloud.osconfig_v1.types.PatchDeployment): Required. The patch deployment to create. """ @@ -324,7 +324,7 @@ class ListPatchDeploymentsResponse(proto.Message): r"""A response message for listing patch deployments. Attributes: - patch_deployments (Sequence[~.gco_patch_deployments.PatchDeployment]): + patch_deployments (Sequence[google.cloud.osconfig_v1.types.PatchDeployment]): The list of patch deployments. 
next_page_token (str): A pagination token that can be used to get diff --git a/google/cloud/osconfig_v1/types/patch_jobs.py b/google/cloud/osconfig_v1/types/patch_jobs.py index 35fec12..0b84a24 100644 --- a/google/cloud/osconfig_v1/types/patch_jobs.py +++ b/google/cloud/osconfig_v1/types/patch_jobs.py @@ -62,15 +62,15 @@ class ExecutePatchJobRequest(proto.Message): description (str): Description of the patch job. Length of the description is limited to 1024 characters. - instance_filter (~.gco_patch_jobs.PatchInstanceFilter): + instance_filter (google.cloud.osconfig_v1.types.PatchInstanceFilter): Required. Instances to patch, either explicitly or filtered by some criteria such as zone or labels. - patch_config (~.gco_patch_jobs.PatchConfig): + patch_config (google.cloud.osconfig_v1.types.PatchConfig): Patch configuration being applied. If omitted, instances are patched using the default configurations. - duration (~.gp_duration.Duration): + duration (google.protobuf.duration_pb2.Duration): Duration of the patch job. After the duration ends, the patch job times out. dry_run (bool): @@ -79,7 +79,7 @@ class ExecutePatchJobRequest(proto.Message): display_name (str): Display name for this patch job. This does not have to be unique. - rollout (~.gco_patch_jobs.PatchRollout): + rollout (google.cloud.osconfig_v1.types.PatchRollout): Rollout strategy of the patch job. """ @@ -149,7 +149,7 @@ class ListPatchJobInstanceDetailsResponse(proto.Message): patch job. Attributes: - patch_job_instance_details (Sequence[~.gco_patch_jobs.PatchJobInstanceDetails]): + patch_job_instance_details (Sequence[google.cloud.osconfig_v1.types.PatchJobInstanceDetails]): A list of instance status. next_page_token (str): A pagination token that can be used to get @@ -180,7 +180,7 @@ class PatchJobInstanceDetails(proto.Message): instance_system_id (str): The unique identifier for the instance. This identifier is defined by the server. 
- state (~.gco_patch_jobs.Instance.PatchState): + state (google.cloud.osconfig_v1.types.Instance.PatchState): Current state of instance patch. failure_reason (str): If the patch fails, this field provides the @@ -233,7 +233,7 @@ class ListPatchJobsResponse(proto.Message): r"""A response message for listing patch jobs. Attributes: - patch_jobs (Sequence[~.gco_patch_jobs.PatchJob]): + patch_jobs (Sequence[google.cloud.osconfig_v1.types.PatchJob]): The list of patch jobs. next_page_token (str): A pagination token that can be used to get @@ -269,20 +269,20 @@ class PatchJob(proto.Message): description (str): Description of the patch job. Length of the description is limited to 1024 characters. - create_time (~.timestamp.Timestamp): + create_time (google.protobuf.timestamp_pb2.Timestamp): Time this patch job was created. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Last time this patch job was updated. - state (~.gco_patch_jobs.PatchJob.State): + state (google.cloud.osconfig_v1.types.PatchJob.State): The current state of the PatchJob. - instance_filter (~.gco_patch_jobs.PatchInstanceFilter): + instance_filter (google.cloud.osconfig_v1.types.PatchInstanceFilter): Instances to patch. - patch_config (~.gco_patch_jobs.PatchConfig): + patch_config (google.cloud.osconfig_v1.types.PatchConfig): Patch configuration being applied. - duration (~.gp_duration.Duration): + duration (google.protobuf.duration_pb2.Duration): Duration of the patch job. After the duration ends, the patch job times out. - instance_details_summary (~.gco_patch_jobs.PatchJob.InstanceDetailsSummary): + instance_details_summary (google.cloud.osconfig_v1.types.PatchJob.InstanceDetailsSummary): Summary of instance details. dry_run (bool): If this patch job is a dry run, the agent @@ -298,7 +298,7 @@ class PatchJob(proto.Message): patch_deployment (str): Output only. Name of the patch deployment that created this patch job. 
- rollout (~.gco_patch_jobs.PatchRollout): + rollout (google.cloud.osconfig_v1.types.PatchRollout): Rollout strategy being applied. """ @@ -434,26 +434,26 @@ class PatchConfig(proto.Message): to apply the patch(es) to a VM instance. Attributes: - reboot_config (~.gco_patch_jobs.PatchConfig.RebootConfig): + reboot_config (google.cloud.osconfig_v1.types.PatchConfig.RebootConfig): Post-patch reboot settings. - apt (~.gco_patch_jobs.AptSettings): + apt (google.cloud.osconfig_v1.types.AptSettings): Apt update settings. Use this setting to override the default ``apt`` patch rules. - yum (~.gco_patch_jobs.YumSettings): + yum (google.cloud.osconfig_v1.types.YumSettings): Yum update settings. Use this setting to override the default ``yum`` patch rules. - goo (~.gco_patch_jobs.GooSettings): + goo (google.cloud.osconfig_v1.types.GooSettings): Goo update settings. Use this setting to override the default ``goo`` patch rules. - zypper (~.gco_patch_jobs.ZypperSettings): + zypper (google.cloud.osconfig_v1.types.ZypperSettings): Zypper update settings. Use this setting to override the default ``zypper`` patch rules. - windows_update (~.gco_patch_jobs.WindowsUpdateSettings): + windows_update (google.cloud.osconfig_v1.types.WindowsUpdateSettings): Windows update settings. Use this override the default windows patch rules. - pre_step (~.gco_patch_jobs.ExecStep): + pre_step (google.cloud.osconfig_v1.types.ExecStep): The ``ExecStep`` to run before the patch update. - post_step (~.gco_patch_jobs.ExecStep): + post_step (google.cloud.osconfig_v1.types.ExecStep): The ``ExecStep`` to run after the patch update. """ @@ -524,7 +524,7 @@ class AptSettings(proto.Message): to control how this is executed. Attributes: - type_ (~.gco_patch_jobs.AptSettings.Type): + type_ (google.cloud.osconfig_v1.types.AptSettings.Type): By changing the type to DIST, the patching is performed using ``apt-get dist-upgrade`` instead. 
excludes (Sequence[str]): @@ -632,7 +632,7 @@ class WindowsUpdateSettings(proto.Message): r"""Windows patching is performed using the Windows Update Agent. Attributes: - classifications (Sequence[~.gco_patch_jobs.WindowsUpdateSettings.Classification]): + classifications (Sequence[google.cloud.osconfig_v1.types.WindowsUpdateSettings.Classification]): Only apply updates of these windows update classifications. If empty, all updates are applied. @@ -671,10 +671,10 @@ class ExecStep(proto.Message): r"""A step that runs an executable for a PatchJob. Attributes: - linux_exec_step_config (~.gco_patch_jobs.ExecStepConfig): + linux_exec_step_config (google.cloud.osconfig_v1.types.ExecStepConfig): The ExecStepConfig for all Linux VMs targeted by the PatchJob. - windows_exec_step_config (~.gco_patch_jobs.ExecStepConfig): + windows_exec_step_config (google.cloud.osconfig_v1.types.ExecStepConfig): The ExecStepConfig for all Windows VMs targeted by the PatchJob. """ @@ -694,13 +694,13 @@ class ExecStepConfig(proto.Message): Attributes: local_path (str): An absolute path to the executable on the VM. - gcs_object (~.gco_patch_jobs.GcsObject): + gcs_object (google.cloud.osconfig_v1.types.GcsObject): A Cloud Storage object containing the executable. allowed_success_codes (Sequence[int]): Defaults to [0]. A list of possible return values that the execution can return to indicate a success. - interpreter (~.gco_patch_jobs.ExecStepConfig.Interpreter): + interpreter (google.cloud.osconfig_v1.types.ExecStepConfig.Interpreter): The script interpreter to use to run the script. If no interpreter is specified the script will be executed directly, which will likely only succeed for scripts with @@ -757,7 +757,7 @@ class PatchInstanceFilter(proto.Message): all_ (bool): Target all VM instances in the project. If true, no other criteria is permitted. 
- group_labels (Sequence[~.gco_patch_jobs.PatchInstanceFilter.GroupLabel]): + group_labels (Sequence[google.cloud.osconfig_v1.types.PatchInstanceFilter.GroupLabel]): Targets VM instances matching ANY of these GroupLabels. This allows targeting of disparate groups of VM instances. @@ -790,7 +790,7 @@ class GroupLabel(proto.Message): and ``app=web``. Attributes: - labels (Sequence[~.gco_patch_jobs.PatchInstanceFilter.GroupLabel.LabelsEntry]): + labels (Sequence[google.cloud.osconfig_v1.types.PatchInstanceFilter.GroupLabel.LabelsEntry]): Compute Engine instance labels that must be present for a VM instance to be targeted by this filter. @@ -815,9 +815,9 @@ class PatchRollout(proto.Message): targeted VMs. Attributes: - mode (~.gco_patch_jobs.PatchRollout.Mode): + mode (google.cloud.osconfig_v1.types.PatchRollout.Mode): Mode of the patch rollout. - disruption_budget (~.osconfig_common.FixedOrPercent): + disruption_budget (google.cloud.osconfig_v1.types.FixedOrPercent): The maximum number (or percentage) of VMs per zone to disrupt at any given moment. 
The number of VMs calculated from multiplying the percentage by the total number of VMs diff --git a/noxfile.py b/noxfile.py index a57e24b..4d37cd3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -30,6 +31,22 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +# 'docfx' is excluded since it only needs to run in 'docs-presubmit' +nox.options.sessions = [ + "unit", + "system", + "cover", + "lint", + "lint_setup_py", + "blacken", + "docs", +] + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -70,17 +87,21 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. - session.install("asyncmock", "pytest-asyncio") - session.install( - "mock", "pytest", "pytest-cov", + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install("-e", ".") + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) + + session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. 
session.run( "py.test", "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", "--cov=google/cloud", "--cov=tests/unit", "--cov-append", @@ -101,6 +122,9 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -110,6 +134,9 @@ def system(session): # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) @@ -122,16 +149,26 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install( - "mock", "pytest", "google-cloud-testutils", - ) - session.install("-e", ".") + session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. 
if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -142,7 +179,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=99") + session.run("coverage", "report", "--show-missing", "--fail-under=98") session.run("coverage", "erase") @@ -174,9 +211,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. 
- # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/renovate.json b/renovate.json index 4fa9493..f08bc22 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,6 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "ignorePaths": [".pre-commit-config.yaml"] } diff --git a/setup.py b/setup.py index 2f23b61..ba33fe5 100644 --- a/setup.py +++ b/setup.py @@ -31,9 +31,8 @@ # 'Development Status :: 5 - Production/Stable' release_status = "Development Status :: 4 - Beta" dependencies = [ - "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "proto-plus >= 1.10.0", - "libcst >= 0.2.5", ] extras = {} diff --git a/synth.metadata b/synth.metadata index 16b00f5..17601c3 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,30 +3,30 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-os-config.git", - "sha": "628ada4004b1add04f5c2d95b9b1cad48616cf2c" + "remote": "https://github.com/googleapis/python-os-config.git", + "sha": "76da7f79463b691df93002851338839a547d121a" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "3f87da2ed1ddc3566ef0810c4fc06a2682cc9f5f", - "internalRef": "343022252" + "sha": "95dd24960cf9f794ef583e59ad9f1fabe1c4a924", + "internalRef": "365882072" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4" + "sha": "551dd78ca04f7989abc9e63e392f8b8cfa1a0ef9" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "373861061648b5fe5e0ac4f8a38b32d639ee93e4" + "sha": "551dd78ca04f7989abc9e63e392f8b8cfa1a0ef9" } } ], @@ -37,102 +37,8 @@ 
"apiName": "osconfig", "apiVersion": "v1", "language": "python", - "generator": "gapic-generator-python" + "generator": "bazel" } } - ], - "generatedFiles": [ - ".coveragerc", - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - ".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "docs/multiprocessing.rst", - 
"docs/osconfig_v1/services.rst", - "docs/osconfig_v1/types.rst", - "google/cloud/osconfig/__init__.py", - "google/cloud/osconfig/py.typed", - "google/cloud/osconfig_v1/__init__.py", - "google/cloud/osconfig_v1/py.typed", - "google/cloud/osconfig_v1/services/__init__.py", - "google/cloud/osconfig_v1/services/os_config_service/__init__.py", - "google/cloud/osconfig_v1/services/os_config_service/async_client.py", - "google/cloud/osconfig_v1/services/os_config_service/client.py", - "google/cloud/osconfig_v1/services/os_config_service/pagers.py", - "google/cloud/osconfig_v1/services/os_config_service/transports/__init__.py", - "google/cloud/osconfig_v1/services/os_config_service/transports/base.py", - "google/cloud/osconfig_v1/services/os_config_service/transports/grpc.py", - "google/cloud/osconfig_v1/services/os_config_service/transports/grpc_asyncio.py", - "google/cloud/osconfig_v1/types/__init__.py", - "google/cloud/osconfig_v1/types/inventory.py", - "google/cloud/osconfig_v1/types/osconfig_common.py", - "google/cloud/osconfig_v1/types/osconfig_service.py", - "google/cloud/osconfig_v1/types/patch_deployments.py", - "google/cloud/osconfig_v1/types/patch_jobs.py", - "mypy.ini", - "noxfile.py", - "renovate.json", - "samples/AUTHORING_GUIDE.md", - "samples/CONTRIBUTING.md", - "scripts/decrypt-secrets.sh", - "scripts/fixup_osconfig_v1_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/osconfig_v1/__init__.py", - "tests/unit/gapic/osconfig_v1/test_os_config_service.py" ] } \ No newline at end of file diff --git a/synth.py b/synth.py index 57c869b..73dab6f 100644 --- a/synth.py +++ b/synth.py @@ -51,11 +51,11 @@ templated_files = common.py_library( 
samples=False, microgenerator=True, - cov_level=99, + cov_level=98, ) s.move( templated_files, excludes=[".coveragerc"] ) # the microgenerator has a good coveragerc file -s.shell.run(["nox", "-s", "blacken"], hide_output=False) \ No newline at end of file +s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 08972fd..69e1c13 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -5,6 +5,5 @@ # # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 -google-api-core==1.22.0 +google-api-core==1.22.2 proto-plus==1.10.0 -libcst==0.2.5 \ No newline at end of file diff --git a/tests/unit/gapic/osconfig_v1/__init__.py b/tests/unit/gapic/osconfig_v1/__init__.py index 8b13789..42ffdf2 100644 --- a/tests/unit/gapic/osconfig_v1/__init__.py +++ b/tests/unit/gapic/osconfig_v1/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# diff --git a/tests/unit/gapic/osconfig_v1/test_os_config_service.py b/tests/unit/gapic/osconfig_v1/test_os_config_service.py index 2affe12..9bac468 100644 --- a/tests/unit/gapic/osconfig_v1/test_os_config_service.py +++ b/tests/unit/gapic/osconfig_v1/test_os_config_service.py @@ -93,21 +93,25 @@ def test__get_default_mtls_endpoint(): ) -def test_os_config_service_client_from_service_account_info(): +@pytest.mark.parametrize( + "client_class", [OsConfigServiceClient, OsConfigServiceAsyncClient,] +) +def test_os_config_service_client_from_service_account_info(client_class): creds = credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = OsConfigServiceClient.from_service_account_info(info) + client = client_class.from_service_account_info(info) assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "osconfig.googleapis.com:443" @pytest.mark.parametrize( - "client_class", [OsConfigServiceClient, OsConfigServiceAsyncClient] + "client_class", [OsConfigServiceClient, OsConfigServiceAsyncClient,] ) def test_os_config_service_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -117,16 +121,21 @@ def test_os_config_service_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "osconfig.googleapis.com:443" def test_os_config_service_client_get_transport_class(): transport = OsConfigServiceClient.get_transport_class() - assert transport == 
transports.OsConfigServiceGrpcTransport + available_transports = [ + transports.OsConfigServiceGrpcTransport, + ] + assert transport in available_transports transport = OsConfigServiceClient.get_transport_class("grpc") assert transport == transports.OsConfigServiceGrpcTransport @@ -177,7 +186,7 @@ def test_os_config_service_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -193,7 +202,7 @@ def test_os_config_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -209,7 +218,7 @@ def test_os_config_service_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -237,7 +246,7 @@ def test_os_config_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -298,29 +307,25 @@ def test_os_config_service_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - 
expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. @@ -329,66 +334,53 @@ def test_os_config_service_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = 
client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -414,7 +406,7 @@ def test_os_config_service_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -444,7 +436,7 @@ def test_os_config_service_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -463,7 +455,7 @@ def test_os_config_service_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -529,6 +521,24 @@ def test_execute_patch_job_from_dict(): test_execute_patch_job(request_type=dict) +def test_execute_patch_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.execute_patch_job), "__call__" + ) as call: + client.execute_patch_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == patch_jobs.ExecutePatchJobRequest() + + @pytest.mark.asyncio async def test_execute_patch_job_async( transport: str = "grpc_asyncio", request_type=patch_jobs.ExecutePatchJobRequest @@ -703,6 +713,22 @@ def test_get_patch_job_from_dict(): test_get_patch_job(request_type=dict) +def test_get_patch_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_patch_job), "__call__") as call: + client.get_patch_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == patch_jobs.GetPatchJobRequest() + + @pytest.mark.asyncio async def test_get_patch_job_async( transport: str = "grpc_asyncio", request_type=patch_jobs.GetPatchJobRequest @@ -936,6 +962,22 @@ def test_cancel_patch_job_from_dict(): test_cancel_patch_job(request_type=dict) +def test_cancel_patch_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_patch_job), "__call__") as call: + client.cancel_patch_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == patch_jobs.CancelPatchJobRequest() + + @pytest.mark.asyncio async def test_cancel_patch_job_async( transport: str = "grpc_asyncio", request_type=patch_jobs.CancelPatchJobRequest @@ -1083,6 +1125,22 @@ def test_list_patch_jobs_from_dict(): test_list_patch_jobs(request_type=dict) +def test_list_patch_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_patch_jobs), "__call__") as call: + client.list_patch_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == patch_jobs.ListPatchJobsRequest() + + @pytest.mark.asyncio async def test_list_patch_jobs_async( transport: str = "grpc_asyncio", request_type=patch_jobs.ListPatchJobsRequest @@ -1416,6 +1474,24 @@ def test_list_patch_job_instance_details_from_dict(): test_list_patch_job_instance_details(request_type=dict) +def test_list_patch_job_instance_details_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_patch_job_instance_details), "__call__" + ) as call: + client.list_patch_job_instance_details() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == patch_jobs.ListPatchJobInstanceDetailsRequest() + + @pytest.mark.asyncio async def test_list_patch_job_instance_details_async( transport: str = "grpc_asyncio", @@ -1802,6 +1878,24 @@ def test_create_patch_deployment_from_dict(): test_create_patch_deployment(request_type=dict) +def test_create_patch_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_patch_deployment), "__call__" + ) as call: + client.create_patch_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == patch_deployments.CreatePatchDeploymentRequest() + + @pytest.mark.asyncio async def test_create_patch_deployment_async( transport: str = "grpc_asyncio", @@ -2044,6 +2138,24 @@ def test_get_patch_deployment_from_dict(): test_get_patch_deployment(request_type=dict) +def test_get_patch_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_patch_deployment), "__call__" + ) as call: + client.get_patch_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == patch_deployments.GetPatchDeploymentRequest() + + @pytest.mark.asyncio async def test_get_patch_deployment_async( transport: str = "grpc_asyncio", @@ -2254,6 +2366,24 @@ def test_list_patch_deployments_from_dict(): test_list_patch_deployments(request_type=dict) +def test_list_patch_deployments_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_patch_deployments), "__call__" + ) as call: + client.list_patch_deployments() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == patch_deployments.ListPatchDeploymentsRequest() + + @pytest.mark.asyncio async def test_list_patch_deployments_async( transport: str = "grpc_asyncio", @@ -2627,6 +2757,24 @@ def test_delete_patch_deployment_from_dict(): test_delete_patch_deployment(request_type=dict) +def test_delete_patch_deployment_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = OsConfigServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_patch_deployment), "__call__" + ) as call: + client.delete_patch_deployment() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == patch_deployments.DeletePatchDeploymentRequest() + + @pytest.mark.asyncio async def test_delete_patch_deployment_async( transport: str = "grpc_asyncio", @@ -2953,6 +3101,51 @@ def test_os_config_service_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.OsConfigServiceGrpcTransport, + transports.OsConfigServiceGrpcAsyncIOTransport, + ], +) +def test_os_config_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_os_config_service_host_no_port(): client = OsConfigServiceClient( credentials=credentials.AnonymousCredentials(), @@ -2974,7 +3167,7 @@ def test_os_config_service_host_with_port(): def test_os_config_service_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.OsConfigServiceGrpcTransport( @@ -2986,7 +3179,7 @@ def test_os_config_service_grpc_transport_channel(): def test_os_config_service_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.OsConfigServiceGrpcAsyncIOTransport( @@ -2997,6 +3190,8 @@ def test_os_config_service_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ @@ -3011,7 +3206,7 @@ def test_os_config_service_transport_channel_mtls_with_client_cert_source( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -3049,6 +3244,8 @@ def test_os_config_service_transport_channel_mtls_with_client_cert_source( assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -3064,7 +3261,7 @@ def test_os_config_service_transport_channel_mtls_with_adc(transport_class): ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel