diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..a4d1fdb --- /dev/null +++ b/.coveragerc @@ -0,0 +1,18 @@ +[run] +branch = True + +[report] +fail_under = 100 +show_missing = True +omit = + google/cloud/functions/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/.gitignore b/.gitignore index b9daa52..b4243ce 100644 --- a/.gitignore +++ b/.gitignore @@ -50,8 +50,10 @@ docs.metadata # Virtual environment env/ + +# Test logs coverage.xml -sponge_log.xml +*sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/.kokoro/build.sh b/.kokoro/build.sh index 4740e8e..ce64bf2 100755 --- a/.kokoro/build.sh +++ b/.kokoro/build.sh @@ -40,6 +40,16 @@ python3 -m pip uninstall --yes --quiet nox-automation python3 -m pip install --upgrade --quiet nox python3 -m nox --version +# If this is a continuous build, send the test log to the FlakyBot. +# See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"continuous"* ]]; then + cleanup() { + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + } + trap cleanup EXIT HUP +fi + # If NOX_SESSION is set, it only runs the specified session, # otherwise run all the sessions. if [[ -n "${NOX_SESSION:-}" ]]; then diff --git a/.kokoro/samples/python3.6/periodic-head.cfg b/.kokoro/samples/python3.6/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.6/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.7/periodic-head.cfg b/.kokoro/samples/python3.7/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.7/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/samples/python3.8/periodic-head.cfg b/.kokoro/samples/python3.8/periodic-head.cfg new file mode 100644 index 0000000..f9cfcd3 --- /dev/null +++ b/.kokoro/samples/python3.8/periodic-head.cfg @@ -0,0 +1,11 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-pubsub/.kokoro/test-samples-against-head.sh" +} diff --git a/.kokoro/test-samples-against-head.sh b/.kokoro/test-samples-against-head.sh new file mode 100755 index 0000000..6c61ffa --- /dev/null +++ b/.kokoro/test-samples-against-head.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# A customized test runner for samples. +# +# For periodic builds, you can specify this file for testing against head. + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-functions + +exec .kokoro/test-samples-impl.sh diff --git a/.kokoro/test-samples-impl.sh b/.kokoro/test-samples-impl.sh new file mode 100755 index 0000000..cf5de74 --- /dev/null +++ b/.kokoro/test-samples-impl.sh @@ -0,0 +1,102 @@ +#!/bin/bash +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets acessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. +gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. 
+ file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the FlakyBot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot + $KOKORO_GFILE_DIR/linux_amd64/flakybot + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index fe5554a..c206906 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -13,6 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. +# The default test runner for samples. +# +# For periodic builds, we rewinds the repo to the latest release, and +# run test-samples-impl.sh. # `-e` enables the script to automatically fail when a command fails # `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero @@ -24,87 +28,19 @@ cd github/python-functions # Run periodic samples tests at latest release if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + # preserving the test runner implementation. + cp .kokoro/test-samples-impl.sh "${TMPDIR}/test-samples-impl.sh" + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + echo "Now we rewind the repo back to the latest release..." LATEST_RELEASE=$(git describe --abbrev=0 --tags) git checkout $LATEST_RELEASE -fi - -# Exit early if samples directory doesn't exist -if [ ! -d "./samples" ]; then - echo "No tests run. `./samples` not found" - exit 0 -fi - -# Disable buffering, so that the logs stream through. -export PYTHONUNBUFFERED=1 - -# Debug: show build environment -env | grep KOKORO - -# Install nox -python3.6 -m pip install --upgrade --quiet nox - -# Use secrets acessor service account to get secrets -if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then - gcloud auth activate-service-account \ - --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ - --project="cloud-devrel-kokoro-resources" -fi - -# This script will create 3 files: -# - testing/test-env.sh -# - testing/service-account.json -# - testing/client-secrets.json -./scripts/decrypt-secrets.sh - -source ./testing/test-env.sh -export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json - -# For cloud-run session, we activate the service account for gcloud sdk. -gcloud auth activate-service-account \ - --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" - -export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json - -echo -e "\n******************** TESTING PROJECTS ********************" - -# Switch to 'fail at end' to allow all tests to complete before exiting. -set +e -# Use RTN to return a non-zero value if the test fails. -RTN=0 -ROOT=$(pwd) -# Find all requirements.txt in the samples directory (may break on whitespace). -for file in samples/**/requirements.txt; do - cd "$ROOT" - # Navigate to the project folder. 
- file=$(dirname "$file") - cd "$file" - - echo "------------------------------------------------------------" - echo "- testing $file" - echo "------------------------------------------------------------" - - # Use nox to execute the tests for the project. - python3.6 -m nox -s "$RUN_TESTS_SESSION" - EXIT=$? - - # If this is a periodic build, send the test log to the FlakyBot. - # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/flakybot. - if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then - chmod +x $KOKORO_GFILE_DIR/linux_amd64/flakybot - $KOKORO_GFILE_DIR/linux_amd64/flakybot + echo "The current head is: " + echo $(git rev-parse --verify HEAD) + echo "--- IMPORTANT IMPORTANT IMPORTANT ---" + # move back the test runner implementation if there's no file. + if [ ! -f .kokoro/test-samples-impl.sh ]; then + cp "${TMPDIR}/test-samples-impl.sh" .kokoro/test-samples-impl.sh fi +fi - if [[ $EXIT -ne 0 ]]; then - RTN=1 - echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" - else - echo -e "\n Testing completed.\n" - fi - -done -cd "$ROOT" - -# Workaround for Kokoro permissions issue: delete secrets -rm testing/{test-env.sh,client-secrets.json,service-account.json} - -exit "$RTN" +exec .kokoro/test-samples-impl.sh diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index a9024b1..32302e4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -12,6 +12,6 @@ repos: hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 - rev: 3.8.4 + rev: 3.9.0 hooks: - id: flake8 diff --git a/docs/functions_v1/cloud_functions_service.rst b/docs/functions_v1/cloud_functions_service.rst new file mode 100644 index 0000000..e20b86c --- /dev/null +++ b/docs/functions_v1/cloud_functions_service.rst @@ -0,0 +1,11 @@ +CloudFunctionsService +--------------------------------------- + +.. automodule:: google.cloud.functions_v1.services.cloud_functions_service + :members: + :inherited-members: + + +.. automodule:: google.cloud.functions_v1.services.cloud_functions_service.pagers + :members: + :inherited-members: diff --git a/docs/functions_v1/services.rst b/docs/functions_v1/services.rst index a0f5ce0..89ae970 100644 --- a/docs/functions_v1/services.rst +++ b/docs/functions_v1/services.rst @@ -1,6 +1,6 @@ Services for Google Cloud Functions v1 API ========================================== +.. toctree:: + :maxdepth: 2 -.. automodule:: google.cloud.functions_v1.services.cloud_functions_service - :members: - :inherited-members: + cloud_functions_service diff --git a/docs/functions_v1/types.rst b/docs/functions_v1/types.rst index e2d24d0..bebdfe9 100644 --- a/docs/functions_v1/types.rst +++ b/docs/functions_v1/types.rst @@ -3,4 +3,5 @@ Types for Google Cloud Functions v1 API .. 
automodule:: google.cloud.functions_v1.types :members: + :undoc-members: :show-inheritance: diff --git a/google/cloud/functions_v1/services/cloud_functions_service/async_client.py b/google/cloud/functions_v1/services/cloud_functions_service/async_client.py index 943640a..5e05ca3 100644 --- a/google/cloud/functions_v1/services/cloud_functions_service/async_client.py +++ b/google/cloud/functions_v1/services/cloud_functions_service/async_client.py @@ -90,7 +90,36 @@ class CloudFunctionsServiceAsyncClient: CloudFunctionsServiceClient.parse_common_location_path ) - from_service_account_file = CloudFunctionsServiceClient.from_service_account_file + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudFunctionsServiceAsyncClient: The constructed client. + """ + return CloudFunctionsServiceClient.from_service_account_info.__func__(CloudFunctionsServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudFunctionsServiceAsyncClient: The constructed client. + """ + return CloudFunctionsServiceClient.from_service_account_file.__func__(CloudFunctionsServiceAsyncClient, filename, *args, **kwargs) # type: ignore + from_service_account_json = from_service_account_file @property @@ -167,7 +196,7 @@ async def list_functions( requested project. Args: - request (:class:`~.functions.ListFunctionsRequest`): + request (:class:`google.cloud.functions_v1.types.ListFunctionsRequest`): The request object. Request for the `ListFunctions` method. @@ -178,8 +207,8 @@ async def list_functions( sent along with the request as metadata. Returns: - ~.pagers.ListFunctionsAsyncPager: - Response for the ``ListFunctions`` method. + google.cloud.functions_v1.services.cloud_functions_service.pagers.ListFunctionsAsyncPager: + Response for the ListFunctions method. Iterating over this object will yield results and resolve additional pages automatically. @@ -193,7 +222,16 @@ async def list_functions( # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_functions, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -228,12 +266,13 @@ async def get_function( requested project. Args: - request (:class:`~.functions.GetFunctionRequest`): + request (:class:`google.cloud.functions_v1.types.GetFunctionRequest`): The request object. Request for the `GetFunction` method. name (:class:`str`): Required. The name of the function which details should be obtained. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -245,7 +284,7 @@ async def get_function( sent along with the request as metadata. 
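The new ``from_service_account_info``/``from_service_account_file`` classmethods added to both clients can be exercised as below; this is a minimal sketch, and the key file path is a placeholder rather than anything taken from this patch::

    import json

    from google.cloud import functions_v1

    # Sync client from a key file on disk (the path is hypothetical).
    client = functions_v1.CloudFunctionsServiceClient.from_service_account_file(
        "service-account.json"
    )

    # The async client now exposes the same constructors, including the
    # dict-based from_service_account_info variant.
    with open("service-account.json") as f:
        info = json.load(f)
    async_client = functions_v1.CloudFunctionsServiceAsyncClient.from_service_account_info(info)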
Returns: - ~.functions.CloudFunction: + google.cloud.functions_v1.types.CloudFunction: Describes a Cloud Function that contains user computation executed in response to an event. It encapsulate @@ -274,7 +313,16 @@ async def get_function( # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_function, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -305,17 +353,18 @@ async def create_function( operation will return ``ALREADY_EXISTS`` error. Args: - request (:class:`~.functions.CreateFunctionRequest`): + request (:class:`google.cloud.functions_v1.types.CreateFunctionRequest`): The request object. Request for the `CreateFunction` method. location (:class:`str`): Required. The project and location in which the function should be created, specified in the format ``projects/*/locations/*`` + This corresponds to the ``location`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - function (:class:`~.functions.CloudFunction`): + function (:class:`google.cloud.functions_v1.types.CloudFunction`): Required. Function to be created. This corresponds to the ``function`` field on the ``request`` instance; if ``request`` is provided, this @@ -328,14 +377,12 @@ async def create_function( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.functions.CloudFunction``: Describes a Cloud - Function that contains user computation executed in - response to an event. It encapsulate function and - triggers configurations. + The result type for the operation will be :class:`google.cloud.functions_v1.types.CloudFunction` Describes a Cloud Function that contains user computation executed in + response to an event. It encapsulate function and + triggers configurations. """ # Create or coerce a protobuf request object. @@ -362,7 +409,7 @@ async def create_function( # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_function, - default_timeout=None, + default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -398,12 +445,13 @@ async def update_function( r"""Updates existing function. Args: - request (:class:`~.functions.UpdateFunctionRequest`): + request (:class:`google.cloud.functions_v1.types.UpdateFunctionRequest`): The request object. Request for the `UpdateFunction` method. - function (:class:`~.functions.CloudFunction`): + function (:class:`google.cloud.functions_v1.types.CloudFunction`): Required. New version of the function. + This corresponds to the ``function`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -415,14 +463,12 @@ async def update_function( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.functions.CloudFunction``: Describes a Cloud - Function that contains user computation executed in - response to an event. It encapsulate function and - triggers configurations. 
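The hunks above give ``list_functions``/``get_function`` a default retry on ``DeadlineExceeded``/``ServiceUnavailable`` with a 600 s deadline. Callers can still override retry and timeout per invocation; a sketch assuming a ``client`` already exists, with backoff numbers that simply mirror the defaults in the patch and a hypothetical function name::

    from google.api_core import exceptions
    from google.api_core import retry as retries

    custom_retry = retries.Retry(
        initial=0.1,      # first backoff, in seconds
        maximum=60.0,     # cap on any single backoff
        multiplier=1.3,   # backoff growth factor
        predicate=retries.if_exception_type(
            exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
        ),
        deadline=600.0,   # give up after 10 minutes overall
    )

    function = client.get_function(
        request={"name": "projects/my-project/locations/us-central1/functions/my-fn"},
        retry=custom_retry,
        timeout=600.0,
    )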
+ The result type for the operation will be :class:`google.cloud.functions_v1.types.CloudFunction` Describes a Cloud Function that contains user computation executed in + response to an event. It encapsulate function and + triggers configurations. """ # Create or coerce a protobuf request object. @@ -447,7 +493,16 @@ async def update_function( # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_function, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -488,12 +543,13 @@ async def delete_function( function. Args: - request (:class:`~.functions.DeleteFunctionRequest`): + request (:class:`google.cloud.functions_v1.types.DeleteFunctionRequest`): The request object. Request for the `DeleteFunction` method. name (:class:`str`): Required. The name of the function which should be deleted. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -505,24 +561,22 @@ async def delete_function( sent along with the request as metadata. Returns: - ~.operation_async.AsyncOperation: + google.api_core.operation_async.AsyncOperation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. @@ -547,7 +601,16 @@ async def delete_function( # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_function, - default_timeout=None, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -587,18 +650,20 @@ async def call_function( Limits `__. Args: - request (:class:`~.functions.CallFunctionRequest`): + request (:class:`google.cloud.functions_v1.types.CallFunctionRequest`): The request object. Request for the `CallFunction` method. name (:class:`str`): Required. The name of the function to be called. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. data (:class:`str`): Required. Input to be passed to the function. + This corresponds to the ``data`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
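``call_function`` is the one RPC here that takes flattened ``name`` and ``data`` arguments; a small sketch in which the function path and payload are illustrative only, and ``client`` is assumed to exist::

    # The function path and payload below are placeholders.
    name = "projects/my-project/locations/us-central1/functions/my-fn"
    response = client.call_function(name=name, data='{"message": "hello"}')
    print(response.execution_id)
    print(response.result or response.error)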
@@ -610,8 +675,8 @@ async def call_function( sent along with the request as metadata. Returns: - ~.functions.CallFunctionResponse: - Response of ``CallFunction`` method. + google.cloud.functions_v1.types.CallFunctionResponse: + Response of CallFunction method. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -637,7 +702,7 @@ async def call_function( # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.call_function, - default_timeout=None, + default_timeout=600.0, client_info=DEFAULT_CLIENT_INFO, ) @@ -691,7 +756,7 @@ async def generate_upload_url( - ``Authorization: Bearer YOUR_TOKEN`` Args: - request (:class:`~.functions.GenerateUploadUrlRequest`): + request (:class:`google.cloud.functions_v1.types.GenerateUploadUrlRequest`): The request object. Request of `GenerateSourceUploadUrl` method. @@ -702,8 +767,8 @@ async def generate_upload_url( sent along with the request as metadata. Returns: - ~.functions.GenerateUploadUrlResponse: - Response of ``GenerateSourceUploadUrl`` method. + google.cloud.functions_v1.types.GenerateUploadUrlResponse: + Response of GenerateSourceUploadUrl method. """ # Create or coerce a protobuf request object. @@ -746,7 +811,7 @@ async def generate_download_url( control/signed-urls Args: - request (:class:`~.functions.GenerateDownloadUrlRequest`): + request (:class:`google.cloud.functions_v1.types.GenerateDownloadUrlRequest`): The request object. Request of `GenerateDownloadUrl` method. @@ -757,8 +822,8 @@ async def generate_download_url( sent along with the request as metadata. Returns: - ~.functions.GenerateDownloadUrlResponse: - Response of ``GenerateDownloadUrl`` method. + google.cloud.functions_v1.types.GenerateDownloadUrlResponse: + Response of GenerateDownloadUrl method. """ # Create or coerce a protobuf request object. @@ -796,7 +861,7 @@ async def set_iam_policy( function. Replaces any existing policy. Args: - request (:class:`~.iam_policy.SetIamPolicyRequest`): + request (:class:`google.iam.v1.iam_policy_pb2.SetIamPolicyRequest`): The request object. Request message for `SetIamPolicy` method. @@ -807,72 +872,62 @@ async def set_iam_policy( sent along with the request as metadata. Returns: - ~.policy.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). """ # Create or coerce a protobuf request object. @@ -915,7 +970,7 @@ async def get_iam_policy( not have a policy set. Args: - request (:class:`~.iam_policy.GetIamPolicyRequest`): + request (:class:`google.iam.v1.iam_policy_pb2.GetIamPolicyRequest`): The request object. Request message for `GetIamPolicy` method. @@ -926,72 +981,62 @@ async def get_iam_policy( sent along with the request as metadata. Returns: - ~.policy.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - - A ``Policy`` is a collection of ``bindings``. 
A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). """ # Create or coerce a protobuf request object. @@ -1034,7 +1079,7 @@ async def test_iam_permissions( return an empty set of permissions, not a NOT_FOUND error. 
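The reflowed ``Policy`` docstrings above are hard to read once wrapped; as a concrete counterpart, a binding can be built with the ``google.iam.v1`` protos and applied through ``set_iam_policy``. A sketch only — the resource path, role, and member are examples, not values from this patch, and ``client`` is assumed to exist::

    from google.iam.v1 import iam_policy_pb2, policy_pb2

    # Hypothetical function resource.
    resource = "projects/my-project/locations/us-central1/functions/my-fn"

    policy = policy_pb2.Policy(
        bindings=[
            policy_pb2.Binding(
                role="roles/cloudfunctions.invoker",
                members=["user:mike@example.com"],
            )
        ]
    )
    new_policy = client.set_iam_policy(
        request=iam_policy_pb2.SetIamPolicyRequest(resource=resource, policy=policy)
    )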
Args: - request (:class:`~.iam_policy.TestIamPermissionsRequest`): + request (:class:`google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest`): The request object. Request message for `TestIamPermissions` method. @@ -1045,8 +1090,8 @@ async def test_iam_permissions( sent along with the request as metadata. Returns: - ~.iam_policy.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. diff --git a/google/cloud/functions_v1/services/cloud_functions_service/client.py b/google/cloud/functions_v1/services/cloud_functions_service/client.py index bdeb8b1..3202d24 100644 --- a/google/cloud/functions_v1/services/cloud_functions_service/client.py +++ b/google/cloud/functions_v1/services/cloud_functions_service/client.py @@ -122,6 +122,22 @@ def _get_default_mtls_endpoint(api_endpoint): DEFAULT_ENDPOINT ) + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + CloudFunctionsServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials @@ -134,7 +150,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): kwargs: Additional arguments to pass to the constructor. Returns: - {@api.name}: The constructed client. + CloudFunctionsServiceClient: The constructed client. """ credentials = service_account.Credentials.from_service_account_file(filename) kwargs["credentials"] = credentials @@ -242,10 +258,10 @@ def __init__( credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. - transport (Union[str, ~.CloudFunctionsServiceTransport]): The + transport (Union[str, CloudFunctionsServiceTransport]): The transport to use. If set to None, a transport is chosen automatically. - client_options (client_options_lib.ClientOptions): Custom options for the + client_options (google.api_core.client_options.ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT @@ -281,21 +297,17 @@ def __init__( util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) ) - ssl_credentials = None + client_cert_source_func = None is_mtls = False if use_client_cert: if client_options.client_cert_source: - import grpc # type: ignore - - cert, key = client_options.client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) is_mtls = True + client_cert_source_func = client_options.client_cert_source else: - creds = SslCredentials() - is_mtls = creds.is_mtls - ssl_credentials = creds.ssl_credentials if is_mtls else None + is_mtls = mtls.has_default_client_cert_source() + client_cert_source_func = ( + mtls.default_client_cert_source() if is_mtls else None + ) # Figure out which api endpoint to use. if client_options.api_endpoint is not None: @@ -338,7 +350,7 @@ def __init__( credentials_file=client_options.credentials_file, host=api_endpoint, scopes=client_options.scopes, - ssl_channel_credentials=ssl_credentials, + client_cert_source_for_mtls=client_cert_source_func, quota_project_id=client_options.quota_project_id, client_info=client_info, ) @@ -355,7 +367,7 @@ def list_functions( requested project. Args: - request (:class:`~.functions.ListFunctionsRequest`): + request (google.cloud.functions_v1.types.ListFunctionsRequest): The request object. Request for the `ListFunctions` method. @@ -366,8 +378,8 @@ def list_functions( sent along with the request as metadata. Returns: - ~.pagers.ListFunctionsPager: - Response for the ``ListFunctions`` method. + google.cloud.functions_v1.services.cloud_functions_service.pagers.ListFunctionsPager: + Response for the ListFunctions method. Iterating over this object will yield results and resolve additional pages automatically. @@ -417,12 +429,13 @@ def get_function( requested project. Args: - request (:class:`~.functions.GetFunctionRequest`): + request (google.cloud.functions_v1.types.GetFunctionRequest): The request object. Request for the `GetFunction` method. - name (:class:`str`): + name (str): Required. The name of the function which details should be obtained. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -434,7 +447,7 @@ def get_function( sent along with the request as metadata. Returns: - ~.functions.CloudFunction: + google.cloud.functions_v1.types.CloudFunction: Describes a Cloud Function that contains user computation executed in response to an event. It encapsulate @@ -495,17 +508,18 @@ def create_function( operation will return ``ALREADY_EXISTS`` error. Args: - request (:class:`~.functions.CreateFunctionRequest`): + request (google.cloud.functions_v1.types.CreateFunctionRequest): The request object. Request for the `CreateFunction` method. - location (:class:`str`): + location (str): Required. The project and location in which the function should be created, specified in the format ``projects/*/locations/*`` + This corresponds to the ``location`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - function (:class:`~.functions.CloudFunction`): + function (google.cloud.functions_v1.types.CloudFunction): Required. Function to be created. This corresponds to the ``function`` field on the ``request`` instance; if ``request`` is provided, this @@ -518,14 +532,12 @@ def create_function( sent along with the request as metadata. 
Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.functions.CloudFunction``: Describes a Cloud - Function that contains user computation executed in - response to an event. It encapsulate function and - triggers configurations. + The result type for the operation will be :class:`google.cloud.functions_v1.types.CloudFunction` Describes a Cloud Function that contains user computation executed in + response to an event. It encapsulate function and + triggers configurations. """ # Create or coerce a protobuf request object. @@ -589,12 +601,13 @@ def update_function( r"""Updates existing function. Args: - request (:class:`~.functions.UpdateFunctionRequest`): + request (google.cloud.functions_v1.types.UpdateFunctionRequest): The request object. Request for the `UpdateFunction` method. - function (:class:`~.functions.CloudFunction`): + function (google.cloud.functions_v1.types.CloudFunction): Required. New version of the function. + This corresponds to the ``function`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -606,14 +619,12 @@ def update_function( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.functions.CloudFunction``: Describes a Cloud - Function that contains user computation executed in - response to an event. It encapsulate function and - triggers configurations. + The result type for the operation will be :class:`google.cloud.functions_v1.types.CloudFunction` Describes a Cloud Function that contains user computation executed in + response to an event. It encapsulate function and + triggers configurations. """ # Create or coerce a protobuf request object. @@ -680,12 +691,13 @@ def delete_function( function. Args: - request (:class:`~.functions.DeleteFunctionRequest`): + request (google.cloud.functions_v1.types.DeleteFunctionRequest): The request object. Request for the `DeleteFunction` method. - name (:class:`str`): + name (str): Required. The name of the function which should be deleted. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -697,24 +709,22 @@ def delete_function( sent along with the request as metadata. Returns: - ~.operation.Operation: + google.api_core.operation.Operation: An object representing a long-running operation. - The result type for the operation will be - :class:``~.empty.Empty``: A generic empty message that - you can re-use to avoid defining duplicated empty - messages in your APIs. A typical example is to use it as - the request or the response type of an API method. For - instance: + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: - :: + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } + } - The JSON representation for ``Empty`` is empty JSON - object ``{}``. + The JSON representation for Empty is empty JSON + object {}. """ # Create or coerce a protobuf request object. 
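``create_function``, ``update_function``, and ``delete_function`` all return ``google.api_core.operation.Operation`` wrappers; blocking on the result looks roughly like this, where the location and function body are placeholders and a real request would need source, entry point, and trigger fields filled in::

    from google.cloud import functions_v1

    function = functions_v1.CloudFunction(
        name="projects/my-project/locations/us-central1/functions/my-fn",
        # ...source, entry point, and trigger fields go here...
    )
    operation = client.create_function(
        location="projects/my-project/locations/us-central1",
        function=function,
    )
    created = operation.result(timeout=600)  # blocks until the LRO finishes
    print(created.status)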
@@ -780,18 +790,20 @@ def call_function( Limits `__. Args: - request (:class:`~.functions.CallFunctionRequest`): + request (google.cloud.functions_v1.types.CallFunctionRequest): The request object. Request for the `CallFunction` method. - name (:class:`str`): + name (str): Required. The name of the function to be called. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. - data (:class:`str`): + data (str): Required. Input to be passed to the function. + This corresponds to the ``data`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -803,8 +815,8 @@ def call_function( sent along with the request as metadata. Returns: - ~.functions.CallFunctionResponse: - Response of ``CallFunction`` method. + google.cloud.functions_v1.types.CallFunctionResponse: + Response of CallFunction method. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have @@ -885,7 +897,7 @@ def generate_upload_url( - ``Authorization: Bearer YOUR_TOKEN`` Args: - request (:class:`~.functions.GenerateUploadUrlRequest`): + request (google.cloud.functions_v1.types.GenerateUploadUrlRequest): The request object. Request of `GenerateSourceUploadUrl` method. @@ -896,8 +908,8 @@ def generate_upload_url( sent along with the request as metadata. Returns: - ~.functions.GenerateUploadUrlResponse: - Response of ``GenerateSourceUploadUrl`` method. + google.cloud.functions_v1.types.GenerateUploadUrlResponse: + Response of GenerateSourceUploadUrl method. """ # Create or coerce a protobuf request object. @@ -941,7 +953,7 @@ def generate_download_url( control/signed-urls Args: - request (:class:`~.functions.GenerateDownloadUrlRequest`): + request (google.cloud.functions_v1.types.GenerateDownloadUrlRequest): The request object. Request of `GenerateDownloadUrl` method. @@ -952,8 +964,8 @@ def generate_download_url( sent along with the request as metadata. Returns: - ~.functions.GenerateDownloadUrlResponse: - Response of ``GenerateDownloadUrl`` method. + google.cloud.functions_v1.types.GenerateDownloadUrlResponse: + Response of GenerateDownloadUrl method. """ # Create or coerce a protobuf request object. @@ -992,7 +1004,7 @@ def set_iam_policy( function. Replaces any existing policy. Args: - request (:class:`~.iam_policy.SetIamPolicyRequest`): + request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): The request object. Request message for `SetIamPolicy` method. @@ -1003,80 +1015,73 @@ def set_iam_policy( sent along with the request as metadata. Returns: - ~.policy.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. 
- - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. + + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy.SetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy.SetIamPolicyRequest() # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1107,7 +1112,7 @@ def get_iam_policy( not have a policy set. 
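The new ``elif not request:`` branch means the IAM helpers now tolerate both a plain dict and no request object at all; in practice the dict form is the common shape. A sketch with a hypothetical resource path::

    resource = "projects/my-project/locations/us-central1/functions/my-fn"

    # Dict requests are expanded into iam_policy_pb2.GetIamPolicyRequest(**request).
    policy = client.get_iam_policy(request={"resource": resource})
    for binding in policy.bindings:
        print(binding.role, list(binding.members))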
Args: - request (:class:`~.iam_policy.GetIamPolicyRequest`): + request (google.iam.v1.iam_policy_pb2.GetIamPolicyRequest): The request object. Request message for `GetIamPolicy` method. @@ -1118,80 +1123,73 @@ def get_iam_policy( sent along with the request as metadata. Returns: - ~.policy.Policy: - Defines an Identity and Access Management (IAM) policy. - It is used to specify access control policies for Cloud - Platform resources. - - A ``Policy`` is a collection of ``bindings``. A - ``binding`` binds one or more ``members`` to a single - ``role``. Members can be user accounts, service - accounts, Google groups, and domains (such as G Suite). - A ``role`` is a named list of permissions (defined by - IAM or configured by users). A ``binding`` can - optionally specify a ``condition``, which is a logic - expression that further constrains the role binding - based on attributes about the request and/or target - resource. - - **JSON Example** - - :: - - { - "bindings": [ - { - "role": "roles/resourcemanager.organizationAdmin", - "members": [ - "user:mike@example.com", - "group:admins@example.com", - "domain:google.com", - "serviceAccount:my-project-id@appspot.gserviceaccount.com" - ] - }, - { - "role": "roles/resourcemanager.organizationViewer", - "members": ["user:eve@example.com"], - "condition": { - "title": "expirable access", - "description": "Does not grant access after Sep 2020", - "expression": "request.time < - timestamp('2020-10-01T00:00:00.000Z')", - } - } - ] - } - - **YAML Example** - - :: - - bindings: - - members: - - user:mike@example.com - - group:admins@example.com - - domain:google.com - - serviceAccount:my-project-id@appspot.gserviceaccount.com - role: roles/resourcemanager.organizationAdmin - - members: - - user:eve@example.com - role: roles/resourcemanager.organizationViewer - condition: - title: expirable access - description: Does not grant access after Sep 2020 - expression: request.time < timestamp('2020-10-01T00:00:00.000Z') - - For a description of IAM and its features, see the `IAM - developer's - guide `__. + google.iam.v1.policy_pb2.Policy: + Defines an Identity and Access Management (IAM) policy. It is used to + specify access control policies for Cloud Platform + resources. + + A Policy is a collection of bindings. A binding binds + one or more members to a single role. Members can be + user accounts, service accounts, Google groups, and + domains (such as G Suite). A role is a named list of + permissions (defined by IAM or configured by users). + A binding can optionally specify a condition, which + is a logic expression that further constrains the + role binding based on attributes about the request + and/or target resource. 
+ + **JSON Example** + + { + "bindings": [ + { + "role": + "roles/resourcemanager.organizationAdmin", + "members": [ "user:mike@example.com", + "group:admins@example.com", + "domain:google.com", + "serviceAccount:my-project-id@appspot.gserviceaccount.com" + ] + + }, { "role": + "roles/resourcemanager.organizationViewer", + "members": ["user:eve@example.com"], + "condition": { "title": "expirable access", + "description": "Does not grant access after + Sep 2020", "expression": "request.time < + timestamp('2020-10-01T00:00:00.000Z')", } } + + ] + + } + + **YAML Example** + + bindings: - members: - user:\ mike@example.com - + group:\ admins@example.com - domain:google.com - + serviceAccount:\ my-project-id@appspot.gserviceaccount.com + role: roles/resourcemanager.organizationAdmin - + members: - user:\ eve@example.com role: + roles/resourcemanager.organizationViewer + condition: title: expirable access description: + Does not grant access after Sep 2020 expression: + request.time < + timestamp('2020-10-01T00:00:00.000Z') + + For a description of IAM and its features, see the + [IAM developer's + guide](\ https://cloud.google.com/iam/docs). """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy.GetIamPolicyRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy.GetIamPolicyRequest() # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. @@ -1222,7 +1220,7 @@ def test_iam_permissions( return an empty set of permissions, not a NOT_FOUND error. Args: - request (:class:`~.iam_policy.TestIamPermissionsRequest`): + request (google.iam.v1.iam_policy_pb2.TestIamPermissionsRequest): The request object. Request message for `TestIamPermissions` method. @@ -1233,15 +1231,18 @@ def test_iam_permissions( sent along with the request as metadata. Returns: - ~.iam_policy.TestIamPermissionsResponse: - Response message for ``TestIamPermissions`` method. + google.iam.v1.iam_policy_pb2.TestIamPermissionsResponse: + Response message for TestIamPermissions method. """ # Create or coerce a protobuf request object. - # The request isn't a proto-plus wrapped type, - # so it must be constructed via keyword expansion. if isinstance(request, dict): + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. request = iam_policy.TestIamPermissionsRequest(**request) + elif not request: + # Null request, just make one. + request = iam_policy.TestIamPermissionsRequest() # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. diff --git a/google/cloud/functions_v1/services/cloud_functions_service/pagers.py b/google/cloud/functions_v1/services/cloud_functions_service/pagers.py index 86f50a4..272d877 100644 --- a/google/cloud/functions_v1/services/cloud_functions_service/pagers.py +++ b/google/cloud/functions_v1/services/cloud_functions_service/pagers.py @@ -15,7 +15,16 @@ # limitations under the License. 
# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple +from typing import ( + Any, + AsyncIterable, + Awaitable, + Callable, + Iterable, + Sequence, + Tuple, + Optional, +) from google.cloud.functions_v1.types import functions @@ -24,7 +33,7 @@ class ListFunctionsPager: """A pager for iterating through ``list_functions`` requests. This class thinly wraps an initial - :class:`~.functions.ListFunctionsResponse` object, and + :class:`google.cloud.functions_v1.types.ListFunctionsResponse` object, and provides an ``__iter__`` method to iterate through its ``functions`` field. @@ -33,7 +42,7 @@ class ListFunctionsPager: through the ``functions`` field on the corresponding responses. - All the usual :class:`~.functions.ListFunctionsResponse` + All the usual :class:`google.cloud.functions_v1.types.ListFunctionsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -51,9 +60,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.functions.ListFunctionsRequest`): + request (google.cloud.functions_v1.types.ListFunctionsRequest): The initial request object. - response (:class:`~.functions.ListFunctionsResponse`): + response (google.cloud.functions_v1.types.ListFunctionsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. @@ -86,7 +95,7 @@ class ListFunctionsAsyncPager: """A pager for iterating through ``list_functions`` requests. This class thinly wraps an initial - :class:`~.functions.ListFunctionsResponse` object, and + :class:`google.cloud.functions_v1.types.ListFunctionsResponse` object, and provides an ``__aiter__`` method to iterate through its ``functions`` field. @@ -95,7 +104,7 @@ class ListFunctionsAsyncPager: through the ``functions`` field on the corresponding responses. - All the usual :class:`~.functions.ListFunctionsResponse` + All the usual :class:`google.cloud.functions_v1.types.ListFunctionsResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ @@ -113,9 +122,9 @@ def __init__( Args: method (Callable): The method that was originally called, and which instantiated this pager. - request (:class:`~.functions.ListFunctionsRequest`): + request (google.cloud.functions_v1.types.ListFunctionsRequest): The initial request object. - response (:class:`~.functions.ListFunctionsResponse`): + response (google.cloud.functions_v1.types.ListFunctionsResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. diff --git a/google/cloud/functions_v1/services/cloud_functions_service/transports/base.py b/google/cloud/functions_v1/services/cloud_functions_service/transports/base.py index bb88a6a..eb6c359 100644 --- a/google/cloud/functions_v1/services/cloud_functions_service/transports/base.py +++ b/google/cloud/functions_v1/services/cloud_functions_service/transports/base.py @@ -71,10 +71,10 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. 
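``ListFunctionsPager``/``ListFunctionsAsyncPager``, documented above, hide page tokens behind plain iteration; roughly, with a placeholder parent and an existing ``client``::

    request = {"parent": "projects/my-project/locations/-"}  # hypothetical parent

    # Iterating the pager yields CloudFunction messages and fetches
    # further pages transparently.
    for function in client.list_functions(request=request):
        print(function.name)

    # The raw responses remain reachable page by page if needed.
    for page in client.list_functions(request=request).pages:
        print(len(page.functions))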
If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. @@ -82,6 +82,9 @@ def __init__( host += ":443" self._host = host + # Save the scopes. + self._scopes = scopes or self.AUTH_SCOPES + # If no credentials are provided, then determine the appropriate # defaults. if credentials and credentials_file: @@ -91,40 +94,81 @@ def __init__( if credentials_file is not None: credentials, _ = auth.load_credentials_from_file( - credentials_file, scopes=scopes, quota_project_id=quota_project_id + credentials_file, scopes=self._scopes, quota_project_id=quota_project_id ) elif credentials is None: credentials, _ = auth.default( - scopes=scopes, quota_project_id=quota_project_id + scopes=self._scopes, quota_project_id=quota_project_id ) # Save the credentials. self._credentials = credentials - # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. self._wrapped_methods = { self.list_functions: gapic_v1.method.wrap_method( - self.list_functions, default_timeout=None, client_info=client_info, + self.list_functions, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, ), self.get_function: gapic_v1.method.wrap_method( - self.get_function, default_timeout=None, client_info=client_info, + self.get_function, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, ), self.create_function: gapic_v1.method.wrap_method( - self.create_function, default_timeout=None, client_info=client_info, + self.create_function, default_timeout=600.0, client_info=client_info, ), self.update_function: gapic_v1.method.wrap_method( - self.update_function, default_timeout=None, client_info=client_info, + self.update_function, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, ), self.delete_function: gapic_v1.method.wrap_method( - self.delete_function, default_timeout=None, client_info=client_info, + self.delete_function, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, ), self.call_function: gapic_v1.method.wrap_method( - self.call_function, default_timeout=None, client_info=client_info, + self.call_function, default_timeout=600.0, client_info=client_info, ), self.generate_upload_url: gapic_v1.method.wrap_method( self.generate_upload_url, default_timeout=None, client_info=client_info, 
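Note on the retry defaults added to the base transport above: list_functions, get_function, update_function and delete_function now retry DeadlineExceeded and ServiceUnavailable with exponential backoff (0.1 s initial delay, 1.3x multiplier, 60 s cap) under a 600 s overall deadline. Below is a minimal sketch of how a caller could override these defaults on a single call; the client construction, project ID and numeric values are illustrative assumptions, not values taken from this change.

# Sketch only: overriding the generated retry/timeout defaults for one call.
# The numbers and the project ID are illustrative assumptions.
from google.api_core import exceptions
from google.api_core import retry as retries
from google.cloud import functions_v1

client = functions_v1.CloudFunctionsServiceClient()

custom_retry = retries.Retry(
    initial=0.2,      # first backoff, in seconds
    maximum=10.0,     # cap for any single backoff
    multiplier=1.3,   # exponential growth factor
    predicate=retries.if_exception_type(
        exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
    ),
    deadline=120.0,   # total time budget for all attempts
)

pager = client.list_functions(
    request={"parent": "projects/my-project/locations/-"},
    retry=custom_retry,
    timeout=30.0,
)
for function in pager:
    print(function.name)

Passing ``retry=None`` on a call instead disables retries for that call entirely.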
diff --git a/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc.py b/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc.py index 98571f8..c299fcb 100644 --- a/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc.py +++ b/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc.py @@ -62,6 +62,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -92,6 +93,10 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): @@ -106,72 +111,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. 
- self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -179,18 +173,8 @@ def __init__( ], ) - self._stubs = {} # type: Dict[str, Callable] - self._operations_client = None - - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @classmethod def create_channel( @@ -204,7 +188,7 @@ def create_channel( ) -> grpc.Channel: """Create and return a gRPC channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If diff --git a/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc_asyncio.py b/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc_asyncio.py index 6f4fe62..28ddfae 100644 --- a/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc_asyncio.py +++ b/google/cloud/functions_v1/services/cloud_functions_service/transports/grpc_asyncio.py @@ -66,7 +66,7 @@ def create_channel( ) -> aio.Channel: """Create and return a gRPC AsyncIO channel object. Args: - address (Optional[str]): The host for the channel to use. + host (Optional[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If @@ -106,6 +106,7 @@ def __init__( api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: @@ -137,12 +138,16 @@ def __init__( ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. 
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing your own client library. Raises: @@ -151,72 +156,61 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._grpc_channel = None self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) if channel: - # Sanity check: Ensure that channel and credentials are not both - # provided. + # Ignore credentials if a channel was passed. credentials = False - # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None - elif api_mtls_endpoint: - warnings.warn( - "api_mtls_endpoint and client_cert_source are deprecated", - DeprecationWarning, - ) - host = ( - api_mtls_endpoint - if ":" in api_mtls_endpoint - else api_mtls_endpoint + ":443" - ) + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - ssl_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) else: - ssl_credentials = SslCredentials().ssl_credentials + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) - # create a new channel. The provided one is ignored. 
- self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - ssl_credentials=ssl_credentials, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - self._ssl_channel_credentials = ssl_credentials - else: - host = host if ":" in host else host + ":443" - - if credentials is None: - credentials, _ = auth.default( - scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id - ) + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + ) - # create a new channel. The provided one is ignored. + if not self._grpc_channel: self._grpc_channel = type(self).create_channel( - host, - credentials=credentials, + self._host, + credentials=self._credentials, credentials_file=credentials_file, - ssl_credentials=ssl_channel_credentials, - scopes=scopes or self.AUTH_SCOPES, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), @@ -224,18 +218,8 @@ def __init__( ], ) - # Run the base constructor. - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes or self.AUTH_SCOPES, - quota_project_id=quota_project_id, - client_info=client_info, - ) - - self._stubs = {} - self._operations_client = None + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) @property def grpc_channel(self) -> aio.Channel: diff --git a/google/cloud/functions_v1/types/__init__.py b/google/cloud/functions_v1/types/__init__.py index de5cc1a..62b2f9b 100644 --- a/google/cloud/functions_v1/types/__init__.py +++ b/google/cloud/functions_v1/types/__init__.py @@ -16,23 +16,23 @@ # from .functions import ( + CallFunctionRequest, + CallFunctionResponse, CloudFunction, - SourceRepository, - HttpsTrigger, + CreateFunctionRequest, + DeleteFunctionRequest, EventTrigger, FailurePolicy, - CreateFunctionRequest, - UpdateFunctionRequest, + GenerateDownloadUrlRequest, + GenerateDownloadUrlResponse, + GenerateUploadUrlRequest, + GenerateUploadUrlResponse, GetFunctionRequest, + HttpsTrigger, ListFunctionsRequest, ListFunctionsResponse, - DeleteFunctionRequest, - CallFunctionRequest, - CallFunctionResponse, - GenerateUploadUrlRequest, - GenerateUploadUrlResponse, - GenerateDownloadUrlRequest, - GenerateDownloadUrlResponse, + SourceRepository, + UpdateFunctionRequest, CloudFunctionStatus, ) from .operations import ( @@ -41,23 +41,23 @@ ) __all__ = ( + "CallFunctionRequest", + "CallFunctionResponse", "CloudFunction", - "SourceRepository", - "HttpsTrigger", + "CreateFunctionRequest", + "DeleteFunctionRequest", "EventTrigger", "FailurePolicy", - "CreateFunctionRequest", - "UpdateFunctionRequest", + "GenerateDownloadUrlRequest", + "GenerateDownloadUrlResponse", + "GenerateUploadUrlRequest", + "GenerateUploadUrlResponse", "GetFunctionRequest", + "HttpsTrigger", "ListFunctionsRequest", "ListFunctionsResponse", - "DeleteFunctionRequest", - "CallFunctionRequest", - "CallFunctionResponse", - "GenerateUploadUrlRequest", - "GenerateUploadUrlResponse", - "GenerateDownloadUrlRequest", - "GenerateDownloadUrlResponse", + "SourceRepository", + "UpdateFunctionRequest", "CloudFunctionStatus", 
"OperationMetadataV1", "OperationType", diff --git a/google/cloud/functions_v1/types/functions.py b/google/cloud/functions_v1/types/functions.py index b448b46..e5c3ee7 100644 --- a/google/cloud/functions_v1/types/functions.py +++ b/google/cloud/functions_v1/types/functions.py @@ -74,7 +74,7 @@ class CloudFunction(proto.Message): The Google Cloud Storage URL, starting with gs://, pointing to the zip archive which contains the function. - source_repository (~.gcf_functions.SourceRepository): + source_repository (google.cloud.functions_v1.types.SourceRepository): **Beta Feature** The source repository where a function is hosted. @@ -82,13 +82,13 @@ class CloudFunction(proto.Message): The Google Cloud Storage signed URL used for source uploading, generated by [google.cloud.functions.v1.GenerateUploadUrl][] - https_trigger (~.gcf_functions.HttpsTrigger): + https_trigger (google.cloud.functions_v1.types.HttpsTrigger): An HTTPS endpoint type of source that can be triggered via URL. - event_trigger (~.gcf_functions.EventTrigger): + event_trigger (google.cloud.functions_v1.types.EventTrigger): A source that fires events in response to a condition in another service. - status (~.gcf_functions.CloudFunctionStatus): + status (google.cloud.functions_v1.types.CloudFunctionStatus): Output only. Status of the function deployment. entry_point (str): @@ -105,7 +105,7 @@ class CloudFunction(proto.Message): function. For a complete list of possible choices, see the ```gcloud`` command reference `__. - timeout (~.duration.Duration): + timeout (google.protobuf.duration_pb2.Duration): The function execution timeout. Execution is considered failed and can be terminated if the function is not completed at the end of the @@ -116,16 +116,16 @@ class CloudFunction(proto.Message): service_account_email (str): The email of the function's service account. If empty, defaults to ``{project_id}@appspot.gserviceaccount.com``. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The last update timestamp of a Cloud Function. version_id (int): Output only. The version identifier of the Cloud Function. Each deployment attempt results in a new version of a function being created. - labels (Sequence[~.gcf_functions.CloudFunction.LabelsEntry]): + labels (Sequence[google.cloud.functions_v1.types.CloudFunction.LabelsEntry]): Labels associated with this Cloud Function. - environment_variables (Sequence[~.gcf_functions.CloudFunction.EnvironmentVariablesEntry]): + environment_variables (Sequence[google.cloud.functions_v1.types.CloudFunction.EnvironmentVariablesEntry]): Environment variables that shall be available during function execution. network (str): @@ -161,10 +161,10 @@ class CloudFunction(proto.Message): See `the VPC documentation `__ for more information on connecting Cloud projects. - vpc_connector_egress_settings (~.gcf_functions.CloudFunction.VpcConnectorEgressSettings): + vpc_connector_egress_settings (google.cloud.functions_v1.types.CloudFunction.VpcConnectorEgressSettings): The egress settings for the connector, controlling what traffic is diverted through it. - ingress_settings (~.gcf_functions.CloudFunction.IngressSettings): + ingress_settings (google.cloud.functions_v1.types.CloudFunction.IngressSettings): The ingress settings for the function, controlling what traffic can reach it. build_id (str): @@ -343,7 +343,7 @@ class EventTrigger(proto.Message): the API will be used. 
For example, ``storage.googleapis.com`` is the default for all event types in the ``google.storage`` namespace. - failure_policy (~.gcf_functions.FailurePolicy): + failure_policy (google.cloud.functions_v1.types.FailurePolicy): Specifies policy for failed executions. """ @@ -362,7 +362,7 @@ class FailurePolicy(proto.Message): them). Attributes: - retry (~.gcf_functions.FailurePolicy.Retry): + retry (google.cloud.functions_v1.types.FailurePolicy.Retry): If specified, then the function will be retried in case of a failure. """ @@ -386,7 +386,7 @@ class CreateFunctionRequest(proto.Message): Required. The project and location in which the function should be created, specified in the format ``projects/*/locations/*`` - function (~.gcf_functions.CloudFunction): + function (google.cloud.functions_v1.types.CloudFunction): Required. Function to be created. """ @@ -399,9 +399,9 @@ class UpdateFunctionRequest(proto.Message): r"""Request for the ``UpdateFunction`` method. Attributes: - function (~.gcf_functions.CloudFunction): + function (google.cloud.functions_v1.types.CloudFunction): Required. New version of the function. - update_mask (~.field_mask.FieldMask): + update_mask (google.protobuf.field_mask_pb2.FieldMask): Required list of fields to be updated in this request. """ @@ -456,7 +456,7 @@ class ListFunctionsResponse(proto.Message): r"""Response for the ``ListFunctions`` method. Attributes: - functions (Sequence[~.gcf_functions.CloudFunction]): + functions (Sequence[google.cloud.functions_v1.types.CloudFunction]): The functions that match the request. next_page_token (str): If not empty, indicates that there may be more functions diff --git a/google/cloud/functions_v1/types/operations.py b/google/cloud/functions_v1/types/operations.py index 2d70e9e..5a452e1 100644 --- a/google/cloud/functions_v1/types/operations.py +++ b/google/cloud/functions_v1/types/operations.py @@ -43,16 +43,16 @@ class OperationMetadataV1(proto.Message): target (str): Target of the operation - for example projects/project-1/locations/region-1/functions/function-1 - type_ (~.operations.OperationType): + type_ (google.cloud.functions_v1.types.OperationType): Type of operation. - request (~.gp_any.Any): + request (google.protobuf.any_pb2.Any): The original request that started the operation. version_id (int): Version id of the function created or updated by an API call. This field is only populated for Create and Update operations. - update_time (~.timestamp.Timestamp): + update_time (google.protobuf.timestamp_pb2.Timestamp): The last update timestamp of the operation. build_id (str): The Cloud Build ID of the function created or diff --git a/noxfile.py b/noxfile.py index 70d9c13..4d37cd3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -18,6 +18,7 @@ from __future__ import absolute_import import os +import pathlib import shutil import nox @@ -30,6 +31,8 @@ SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + # 'docfx' is excluded since it only needs to run in 'docs-presubmit' nox.options.sessions = [ "unit", @@ -41,6 +44,9 @@ "docs", ] +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + @nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): @@ -81,18 +87,21 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. 
- session.install("asyncmock", "pytest-asyncio") - session.install( - "mock", "pytest", "pytest-cov", + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) + session.install("asyncmock", "pytest-asyncio", "-c", constraints_path) - session.install("-e", ".") + session.install("mock", "pytest", "pytest-cov", "-c", constraints_path) + + session.install("-e", ".", "-c", constraints_path) # Run py.test against the unit tests. session.run( "py.test", "--quiet", + f"--junitxml=unit_{session.python}_sponge_log.xml", "--cov=google/cloud", "--cov=tests/unit", "--cov-append", @@ -113,6 +122,9 @@ def unit(session): @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" + constraints_path = str( + CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" + ) system_test_path = os.path.join("tests", "system.py") system_test_folder_path = os.path.join("tests", "system") @@ -122,6 +134,9 @@ def system(session): # Sanity check: Only run tests if the environment variable is set. if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""): session.skip("Credentials must be set via environment variable") + # Install pyopenssl for mTLS testing. + if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true": + session.install("pyopenssl") system_test_exists = os.path.exists(system_test_path) system_test_folder_exists = os.path.exists(system_test_folder_path) @@ -134,16 +149,26 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install( - "mock", "pytest", "google-cloud-testutils", - ) - session.install("-e", ".") + session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) + session.install("-e", ".", "-c", constraints_path) # Run py.test against the system tests. if system_test_exists: - session.run("py.test", "--quiet", system_test_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_path, + *session.posargs, + ) if system_test_folder_exists: - session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) + session.run( + "py.test", + "--quiet", + f"--junitxml=system_{session.python}_sponge_log.xml", + system_test_folder_path, + *session.posargs, + ) @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -154,7 +179,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=99") + session.run("coverage", "report", "--show-missing", "--fail-under=98") session.run("coverage", "erase") @@ -186,9 +211,7 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - # sphinx-docfx-yaml supports up to sphinx version 1.5.5. 
- # https://github.com/docascode/sphinx-docfx-yaml/issues/97 - session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") + session.install("sphinx", "alabaster", "recommonmark", "gcp-sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/renovate.json b/renovate.json index 4fa9493..f08bc22 100644 --- a/renovate.json +++ b/renovate.json @@ -1,5 +1,6 @@ { "extends": [ "config:base", ":preserveSemverRanges" - ] + ], + "ignorePaths": [".pre-commit-config.yaml"] } diff --git a/setup.py b/setup.py index b94ddf6..a359f5a 100644 --- a/setup.py +++ b/setup.py @@ -41,9 +41,9 @@ platforms="Posix; MacOS X; Windows", include_package_data=True, install_requires=( - "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", + "google-api-core[grpc] >= 1.22.2, < 2.0.0dev", "proto-plus >= 1.10.0", - "grpc-google-iam-v1", + "grpc-google-iam-v1 >= 0.12.3, < 0.13dev", ), python_requires=">=3.6", scripts=["scripts/fixup_functions_v1_keywords.py",], diff --git a/synth.metadata b/synth.metadata index 50468d4..7df8926 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,30 +3,30 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-functions.git", - "sha": "f9d4d8f1366587b3bdee06ab81ee5ff23ca67bc2" + "remote": "git@github.com:googleapis/python-functions", + "sha": "d95bb94ee31c64a8b6859e29941e61bcc96bd43d" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "dd372aa22ded7a8ba6f0e03a80e06358a3fa0907", - "internalRef": "347055288" + "sha": "915925089600094e72e4bfa8cf586c170e6b7109", + "internalRef": "366152684" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" + "sha": "6d76df2138f8f841e5a5b9ac427f81def520c15f" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "4679e7e415221f03ff2a71e3ffad75b9ec41d87e" + "sha": "6d76df2138f8f841e5a5b9ac427f81def520c15f" } } ], @@ -40,95 +40,5 @@ "generator": "bazel" } } - ], - "generatedFiles": [ - ".flake8", - ".github/CONTRIBUTING.md", - ".github/ISSUE_TEMPLATE/bug_report.md", - ".github/ISSUE_TEMPLATE/feature_request.md", - ".github/ISSUE_TEMPLATE/support_request.md", - ".github/PULL_REQUEST_TEMPLATE.md", - ".github/header-checker-lint.yml", - ".github/release-please.yml", - ".github/snippet-bot.yml", - ".gitignore", - ".kokoro/build.sh", - ".kokoro/continuous/common.cfg", - ".kokoro/continuous/continuous.cfg", - ".kokoro/docker/docs/Dockerfile", - ".kokoro/docker/docs/fetch_gpg_keys.sh", - ".kokoro/docs/common.cfg", - ".kokoro/docs/docs-presubmit.cfg", - ".kokoro/docs/docs.cfg", - ".kokoro/populate-secrets.sh", - ".kokoro/presubmit/common.cfg", - ".kokoro/presubmit/presubmit.cfg", - ".kokoro/publish-docs.sh", - ".kokoro/release.sh", - ".kokoro/release/common.cfg", - ".kokoro/release/release.cfg", - ".kokoro/samples/lint/common.cfg", - ".kokoro/samples/lint/continuous.cfg", - ".kokoro/samples/lint/periodic.cfg", - ".kokoro/samples/lint/presubmit.cfg", - ".kokoro/samples/python3.6/common.cfg", - ".kokoro/samples/python3.6/continuous.cfg", - ".kokoro/samples/python3.6/periodic.cfg", - ".kokoro/samples/python3.6/presubmit.cfg", - ".kokoro/samples/python3.7/common.cfg", - ".kokoro/samples/python3.7/continuous.cfg", - ".kokoro/samples/python3.7/periodic.cfg", - ".kokoro/samples/python3.7/presubmit.cfg", - ".kokoro/samples/python3.8/common.cfg", - ".kokoro/samples/python3.8/continuous.cfg", - 
".kokoro/samples/python3.8/periodic.cfg", - ".kokoro/samples/python3.8/presubmit.cfg", - ".kokoro/test-samples.sh", - ".kokoro/trampoline.sh", - ".kokoro/trampoline_v2.sh", - ".pre-commit-config.yaml", - ".trampolinerc", - "CODE_OF_CONDUCT.md", - "CONTRIBUTING.rst", - "LICENSE", - "MANIFEST.in", - "docs/_static/custom.css", - "docs/_templates/layout.html", - "docs/conf.py", - "docs/functions_v1/services.rst", - "docs/functions_v1/types.rst", - "docs/multiprocessing.rst", - "functions-v1-py.tar.gz", - "google/cloud/functions/__init__.py", - "google/cloud/functions/py.typed", - "google/cloud/functions_v1/__init__.py", - "google/cloud/functions_v1/py.typed", - "google/cloud/functions_v1/services/__init__.py", - "google/cloud/functions_v1/services/cloud_functions_service/__init__.py", - "google/cloud/functions_v1/services/cloud_functions_service/async_client.py", - "google/cloud/functions_v1/services/cloud_functions_service/client.py", - "google/cloud/functions_v1/services/cloud_functions_service/pagers.py", - "google/cloud/functions_v1/services/cloud_functions_service/transports/__init__.py", - "google/cloud/functions_v1/services/cloud_functions_service/transports/base.py", - "google/cloud/functions_v1/services/cloud_functions_service/transports/grpc.py", - "google/cloud/functions_v1/services/cloud_functions_service/transports/grpc_asyncio.py", - "google/cloud/functions_v1/types/__init__.py", - "google/cloud/functions_v1/types/functions.py", - "google/cloud/functions_v1/types/operations.py", - "mypy.ini", - "noxfile.py", - "renovate.json", - "scripts/decrypt-secrets.sh", - "scripts/fixup_functions_v1_keywords.py", - "scripts/readme-gen/readme_gen.py", - "scripts/readme-gen/templates/README.tmpl.rst", - "scripts/readme-gen/templates/auth.tmpl.rst", - "scripts/readme-gen/templates/auth_api_key.tmpl.rst", - "scripts/readme-gen/templates/install_deps.tmpl.rst", - "scripts/readme-gen/templates/install_portaudio.tmpl.rst", - "setup.cfg", - "testing/.gitignore", - "tests/unit/gapic/functions_v1/__init__.py", - "tests/unit/gapic/functions_v1/test_cloud_functions_service.py" ] } \ No newline at end of file diff --git a/synth.py b/synth.py index a5a5407..78f6727 100644 --- a/synth.py +++ b/synth.py @@ -37,7 +37,7 @@ # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=99, microgenerator=True) +templated_files = common.py_library(cov_level=98, microgenerator=True) s.move( templated_files, excludes=[".coveragerc"] ) # the microgenerator has a good coveragerc file diff --git a/functions-v1-py.tar.gz b/testing/constraints-3.10.txt similarity index 100% rename from functions-v1-py.tar.gz rename to testing/constraints-3.10.txt diff --git a/testing/constraints-3.11.txt b/testing/constraints-3.11.txt new file mode 100644 index 0000000..e69de29 diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt new file mode 100644 index 0000000..cc56e72 --- /dev/null +++ b/testing/constraints-3.6.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. 
+# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +google-api-core==1.22.2 +proto-plus==1.10.0 +grpc-google-iam-v1==0.12.3 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt new file mode 100644 index 0000000..e69de29 diff --git a/testing/constraints-3.8.txt b/testing/constraints-3.8.txt new file mode 100644 index 0000000..e69de29 diff --git a/testing/constraints-3.9.txt b/testing/constraints-3.9.txt new file mode 100644 index 0000000..e69de29 diff --git a/tests/unit/gapic/functions_v1/__init__.py b/tests/unit/gapic/functions_v1/__init__.py index 8b13789..42ffdf2 100644 --- a/tests/unit/gapic/functions_v1/__init__.py +++ b/tests/unit/gapic/functions_v1/__init__.py @@ -1 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/tests/unit/gapic/functions_v1/test_cloud_functions_service.py b/tests/unit/gapic/functions_v1/test_cloud_functions_service.py index f462f62..d328a0e 100644 --- a/tests/unit/gapic/functions_v1/test_cloud_functions_service.py +++ b/tests/unit/gapic/functions_v1/test_cloud_functions_service.py @@ -102,7 +102,24 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [CloudFunctionsServiceClient, CloudFunctionsServiceAsyncClient] + "client_class", [CloudFunctionsServiceClient, CloudFunctionsServiceAsyncClient,] +) +def test_cloud_functions_service_client_from_service_account_info(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == "cloudfunctions.googleapis.com:443" + + +@pytest.mark.parametrize( + "client_class", [CloudFunctionsServiceClient, CloudFunctionsServiceAsyncClient,] ) def test_cloud_functions_service_client_from_service_account_file(client_class): creds = credentials.AnonymousCredentials() @@ -112,16 +129,21 @@ def test_cloud_functions_service_client_from_service_account_file(client_class): factory.return_value = creds client = client_class.from_service_account_file("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) client = client_class.from_service_account_json("dummy/file/path.json") assert client.transport._credentials == creds + assert isinstance(client, client_class) assert client.transport._host == "cloudfunctions.googleapis.com:443" def test_cloud_functions_service_client_get_transport_class(): transport = CloudFunctionsServiceClient.get_transport_class() - assert transport == transports.CloudFunctionsServiceGrpcTransport + available_transports = [ + transports.CloudFunctionsServiceGrpcTransport, + ] + assert transport in available_transports transport = 
CloudFunctionsServiceClient.get_transport_class("grpc") assert transport == transports.CloudFunctionsServiceGrpcTransport @@ -176,7 +198,7 @@ def test_cloud_functions_service_client_client_options( credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -192,7 +214,7 @@ def test_cloud_functions_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -208,7 +230,7 @@ def test_cloud_functions_service_client_client_options( credentials_file=None, host=client.DEFAULT_MTLS_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -236,7 +258,7 @@ def test_cloud_functions_service_client_client_options( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id="octopus", client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -297,29 +319,25 @@ def test_cloud_functions_service_client_mtls_env_auto( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - ssl_channel_creds = mock.Mock() - with mock.patch( - "grpc.ssl_channel_credentials", return_value=ssl_channel_creds - ): - patched.return_value = None - client = client_class(client_options=options) + patched.return_value = None + client = client_class(client_options=options) - if use_client_cert_env == "false": - expected_ssl_channel_creds = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_ssl_channel_creds = ssl_channel_creds - expected_host = client.DEFAULT_MTLS_ENDPOINT + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) # Check the case ADC client cert is provided. Whether client cert is used depends on # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
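The rewritten mTLS tests above exercise the new client_cert_source_for_mtls path through google.auth.transport.mtls instead of mocking SslCredentials directly. For context, here is a rough sketch of how an application would opt in to that path; the PEM file names are hypothetical placeholders and this is not code from this change.

# Sketch only: opting in to mutual TLS via client_options.
import os

from google.api_core.client_options import ClientOptions
from google.cloud import functions_v1


def load_client_cert():
    # Must return (certificate bytes, private key bytes), both PEM-encoded.
    # The file names below are hypothetical.
    with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
        return cert.read(), key.read()


# The client only considers client certificates when this variable is "true".
os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"

client = functions_v1.CloudFunctionsServiceClient(
    client_options=ClientOptions(client_cert_source=load_client_cert)
)
# With a certificate source available, the client selects the mTLS endpoint and
# the transport receives the callback as client_cert_source_for_mtls.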
@@ -328,66 +346,53 @@ def test_cloud_functions_service_client_mtls_env_auto( ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, ): with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.ssl_credentials", - new_callable=mock.PropertyMock, - ) as ssl_credentials_mock: - if use_client_cert_env == "false": - is_mtls_mock.return_value = False - ssl_credentials_mock.return_value = None - expected_host = client.DEFAULT_ENDPOINT - expected_ssl_channel_creds = None - else: - is_mtls_mock.return_value = True - ssl_credentials_mock.return_value = mock.Mock() - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_ssl_channel_creds = ( - ssl_credentials_mock.return_value - ) - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - ssl_channel_credentials=expected_ssl_channel_creds, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.grpc.SslCredentials.__init__", return_value=None - ): - with mock.patch( - "google.auth.transport.grpc.SslCredentials.is_mtls", - new_callable=mock.PropertyMock, - ) as is_mtls_mock: - is_mtls_mock.return_value = False patched.return_value = None client = client_class() patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_ENDPOINT, + host=expected_host, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=expected_client_cert_source, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + @pytest.mark.parametrize( "client_class,transport_class,transport_name", @@ -417,7 +422,7 @@ def test_cloud_functions_service_client_client_options_scopes( credentials_file=None, host=client.DEFAULT_ENDPOINT, scopes=["1", "2"], - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -451,7 +456,7 @@ def test_cloud_functions_service_client_client_options_credentials_file( credentials_file="credentials.json", host=client.DEFAULT_ENDPOINT, scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -470,7 +475,7 @@ def test_cloud_functions_service_client_client_options_from_dict(): credentials_file=None, host="squid.clam.whelk", scopes=None, - ssl_channel_credentials=None, + client_cert_source_for_mtls=None, quota_project_id=None, client_info=transports.base.DEFAULT_CLIENT_INFO, ) @@ -515,6 +520,22 @@ def test_list_functions_from_dict(): test_list_functions(request_type=dict) +def test_list_functions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudFunctionsServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_functions), "__call__") as call: + client.list_functions() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == functions.ListFunctionsRequest() + + @pytest.mark.asyncio async def test_list_functions_async( transport: str = "grpc_asyncio", request_type=functions.ListFunctionsRequest @@ -838,6 +859,22 @@ def test_get_function_from_dict(): test_get_function(request_type=dict) +def test_get_function_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudFunctionsServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_function), "__call__") as call: + client.get_function() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == functions.GetFunctionRequest() + + @pytest.mark.asyncio async def test_get_function_async( transport: str = "grpc_asyncio", request_type=functions.GetFunctionRequest @@ -1084,6 +1121,22 @@ def test_create_function_from_dict(): test_create_function(request_type=dict) +def test_create_function_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = CloudFunctionsServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.create_function), "__call__") as call: + client.create_function() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == functions.CreateFunctionRequest() + + @pytest.mark.asyncio async def test_create_function_async( transport: str = "grpc_asyncio", request_type=functions.CreateFunctionRequest @@ -1296,6 +1349,22 @@ def test_update_function_from_dict(): test_update_function(request_type=dict) +def test_update_function_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudFunctionsServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.update_function), "__call__") as call: + client.update_function() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == functions.UpdateFunctionRequest() + + @pytest.mark.asyncio async def test_update_function_async( transport: str = "grpc_asyncio", request_type=functions.UpdateFunctionRequest @@ -1502,6 +1571,22 @@ def test_delete_function_from_dict(): test_delete_function(request_type=dict) +def test_delete_function_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudFunctionsServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_function), "__call__") as call: + client.delete_function() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == functions.DeleteFunctionRequest() + + @pytest.mark.asyncio async def test_delete_function_async( transport: str = "grpc_asyncio", request_type=functions.DeleteFunctionRequest @@ -1711,6 +1796,22 @@ def test_call_function_from_dict(): test_call_function(request_type=dict) +def test_call_function_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudFunctionsServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.call_function), "__call__") as call: + client.call_function() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == functions.CallFunctionRequest() + + @pytest.mark.asyncio async def test_call_function_async( transport: str = "grpc_asyncio", request_type=functions.CallFunctionRequest @@ -1932,6 +2033,24 @@ def test_generate_upload_url_from_dict(): test_generate_upload_url(request_type=dict) +def test_generate_upload_url_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudFunctionsServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.generate_upload_url), "__call__" + ) as call: + client.generate_upload_url() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == functions.GenerateUploadUrlRequest() + + @pytest.mark.asyncio async def test_generate_upload_url_async( transport: str = "grpc_asyncio", request_type=functions.GenerateUploadUrlRequest @@ -2070,6 +2189,24 @@ def test_generate_download_url_from_dict(): test_generate_download_url(request_type=dict) +def test_generate_download_url_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudFunctionsServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.generate_download_url), "__call__" + ) as call: + client.generate_download_url() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == functions.GenerateDownloadUrlRequest() + + @pytest.mark.asyncio async def test_generate_download_url_async( transport: str = "grpc_asyncio", request_type=functions.GenerateDownloadUrlRequest @@ -2206,6 +2343,22 @@ def test_set_iam_policy_from_dict(): test_set_iam_policy(request_type=dict) +def test_set_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudFunctionsServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call: + client.set_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.SetIamPolicyRequest() + + @pytest.mark.asyncio async def test_set_iam_policy_async( transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest @@ -2354,6 +2507,22 @@ def test_get_iam_policy_from_dict(): test_get_iam_policy(request_type=dict) +def test_get_iam_policy_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudFunctionsServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call: + client.get_iam_policy() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.GetIamPolicyRequest() + + @pytest.mark.asyncio async def test_get_iam_policy_async( transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest @@ -2504,6 +2673,24 @@ def test_test_iam_permissions_from_dict(): test_test_iam_permissions(request_type=dict) +def test_test_iam_permissions_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = CloudFunctionsServiceClient( + credentials=credentials.AnonymousCredentials(), transport="grpc", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.test_iam_permissions), "__call__" + ) as call: + client.test_iam_permissions() + call.assert_called() + _, args, _ = call.mock_calls[0] + + assert args[0] == iam_policy.TestIamPermissionsRequest() + + @pytest.mark.asyncio async def test_test_iam_permissions_async( transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest @@ -2799,6 +2986,53 @@ def test_cloud_functions_service_transport_auth_adc(): ) +@pytest.mark.parametrize( + "transport_class", + [ + transports.CloudFunctionsServiceGrpcTransport, + transports.CloudFunctionsServiceGrpcAsyncIOTransport, + ], +) +def test_cloud_functions_service_grpc_transport_client_cert_source_for_mtls( + transport_class, +): + cred = credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + def test_cloud_functions_service_host_no_port(): client = CloudFunctionsServiceClient( credentials=credentials.AnonymousCredentials(), @@ -2820,7 +3054,7 @@ def test_cloud_functions_service_host_with_port(): def test_cloud_functions_service_grpc_transport_channel(): - channel = grpc.insecure_channel("http://localhost/") + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.CloudFunctionsServiceGrpcTransport( @@ -2832,7 +3066,7 @@ def test_cloud_functions_service_grpc_transport_channel(): def test_cloud_functions_service_grpc_asyncio_transport_channel(): - channel = aio.insecure_channel("http://localhost/") + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) # Check that channel is used if provided. transport = transports.CloudFunctionsServiceGrpcAsyncIOTransport( @@ -2843,6 +3077,8 @@ def test_cloud_functions_service_grpc_asyncio_transport_channel(): assert transport._ssl_channel_credentials == None +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
@pytest.mark.parametrize( "transport_class", [ @@ -2857,7 +3093,7 @@ def test_cloud_functions_service_transport_channel_mtls_with_client_cert_source( "grpc.ssl_channel_credentials", autospec=True ) as grpc_ssl_channel_cred: with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_ssl_cred = mock.Mock() grpc_ssl_channel_cred.return_value = mock_ssl_cred @@ -2895,6 +3131,8 @@ def test_cloud_functions_service_transport_channel_mtls_with_client_cert_source( assert transport._ssl_channel_credentials == mock_ssl_cred +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. @pytest.mark.parametrize( "transport_class", [ @@ -2910,7 +3148,7 @@ def test_cloud_functions_service_transport_channel_mtls_with_adc(transport_class ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), ): with mock.patch.object( - transport_class, "create_channel", autospec=True + transport_class, "create_channel" ) as grpc_create_channel: mock_grpc_channel = mock.Mock() grpc_create_channel.return_value = mock_grpc_channel
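The channel tests above now build secure local channels, and the transports accept a caller-supplied channel unchanged. As a final illustration, a sketch of handing a pre-built channel to the transport; the endpoint and credential handling are simplified assumptions, since a raw channel bypasses the client's own credential setup.

# Sketch only: supplying an explicit gRPC channel. When ``channel`` is passed,
# the transport ignores credentials and SSL settings, so in real use the
# channel itself must already carry the necessary auth (simplified here).
import grpc

from google.cloud import functions_v1
from google.cloud.functions_v1.services.cloud_functions_service import transports

channel = grpc.secure_channel(
    "cloudfunctions.googleapis.com:443", grpc.ssl_channel_credentials()
)
transport = transports.CloudFunctionsServiceGrpcTransport(channel=channel)
client = functions_v1.CloudFunctionsServiceClient(transport=transport)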