diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml
new file mode 100644
index 00000000..e69de29b
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index 6039d7e9..389a65e6 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -30,7 +30,7 @@ env_vars: {
env_vars: {
key: "V2_STAGING_BUCKET"
- value: "docs-staging-v2-staging"
+ value: "docs-staging-v2"
}
# It will upload the docker image after successful builds.
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
new file mode 100755
index 00000000..f5251425
--- /dev/null
+++ b/.kokoro/populate-secrets.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Copyright 2020 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function msg { println "$*" >&2 ;}
+function println { printf '%s\n' "$(now) $*" ;}
+
+
+# Populates requested secrets set in SECRET_MANAGER_KEYS from service account:
+# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
+SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
+msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
+mkdir -p ${SECRET_LOCATION}
+for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
+do
+ msg "Retrieving secret ${key}"
+ docker run --entrypoint=gcloud \
+ --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \
+ gcr.io/google.com/cloudsdktool/cloud-sdk \
+ secrets versions access latest \
+ --project cloud-devrel-kokoro-resources \
+ --secret ${key} > \
+ "${SECRET_LOCATION}/${key}"
+ if [[ $? == 0 ]]; then
+ msg "Secret written to ${SECRET_LOCATION}/${key}"
+ else
+ msg "Error retrieving secret ${key}"
+ fi
+done
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
index 77edf9ab..3156ce87 100644
--- a/.kokoro/release/common.cfg
+++ b/.kokoro/release/common.cfg
@@ -23,42 +23,18 @@ env_vars: {
value: "github/python-datacatalog/.kokoro/release.sh"
}
-# Fetch the token needed for reporting release status to GitHub
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "yoshi-automation-github-key"
- }
- }
-}
-
-# Fetch PyPI password
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "google_cloud_pypi_password"
- }
- }
-}
-
-# Fetch magictoken to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "releasetool-magictoken"
- }
- }
+# Fetch PyPI password
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "google_cloud_pypi_password"
+ }
+ }
}
-# Fetch api key to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "magic-github-proxy-api-key"
- }
- }
-}
+# Tokens needed to report release status back to GitHub
+env_vars: {
+ key: "SECRET_MANAGER_KEYS"
+ value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
index 1e4766c2..9e55ece3 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.6"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py36"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-datacatalog/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index 3a67b23d..d1630ce2 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.7"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py37"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-datacatalog/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index 5ffafdbc..b53c2553 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.8"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py38"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-datacatalog/.kokoro/test-samples.sh"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index b8ac5ae1..49ac61fa 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -28,6 +28,12 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
git checkout $LATEST_RELEASE
fi
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+  echo "No tests run. \`./samples\` not found"
+ exit 0
+fi
+
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -101,4 +107,4 @@ cd "$ROOT"
# Workaround for Kokoro permissions issue: delete secrets
rm testing/{test-env.sh,client-secrets.json,service-account.json}
-exit "$RTN"
\ No newline at end of file
+exit "$RTN"
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index e8c4251f..f39236e9 100755
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -15,9 +15,14 @@
set -eo pipefail
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$?
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+ chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ echo "cleanup";
+}
+trap cleanup EXIT
-chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
-${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
-
-exit ${ret_code}
+$(dirname $0)/populate-secrets.sh # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index b3d1f602..039f4368 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,44 +1,95 @@
-# Contributor Code of Conduct
+# Code of Conduct
-As contributors and maintainers of this project,
-and in the interest of fostering an open and welcoming community,
-we pledge to respect all people who contribute through reporting issues,
-posting feature requests, updating documentation,
-submitting pull requests or patches, and other activities.
+## Our Pledge
-We are committed to making participation in this project
-a harassment-free experience for everyone,
-regardless of level of experience, gender, gender identity and expression,
-sexual orientation, disability, personal appearance,
-body size, race, ethnicity, age, religion, or nationality.
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of
+experience, education, socio-economic status, nationality, personal appearance,
+race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
-* The use of sexualized language or imagery
-* Personal attacks
-* Trolling or insulting/derogatory comments
-* Public or private harassment
-* Publishing other's private information,
-such as physical or electronic
-addresses, without explicit permission
-* Other unethical or unprofessional conduct.
+* The use of sexualized language or imagery and unwelcome sexual attention or
+ advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct.
-By adopting this Code of Conduct,
-project maintainers commit themselves to fairly and consistently
-applying these principles to every aspect of managing this project.
-Project maintainers who do not follow or enforce the Code of Conduct
-may be permanently removed from the project team.
-
-This code of conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community.
-
-Instances of abusive, harassing, or otherwise unacceptable behavior
-may be reported by opening an issue
-or contacting one or more of the project maintainers.
-
-This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0,
-available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, or to ban temporarily or permanently any
+contributor for other behaviors that they deem inappropriate, threatening,
+offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+This Code of Conduct also applies outside the project spaces when the Project
+Steward has a reasonable belief that an individual's behavior may have a
+negative impact on the project or its community.
+
+## Conflict Resolution
+
+We do not believe that all conflict is bad; healthy debate and disagreement
+often yield positive results. However, it is never okay to be disrespectful or
+to engage in behavior that violates the project’s code of conduct.
+
+If you see someone violating the code of conduct, you are encouraged to address
+the behavior directly with those involved. Many issues can be resolved quickly
+and easily, and this gives people more control over the outcome of their
+dispute. If you are unable to resolve the matter for any reason, or if the
+behavior is threatening or harassing, report it. We are dedicated to providing
+an environment where participants feel welcome and safe.
+
+
+Reports should be directed to *googleapis-stewards@google.com*, the
+Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to
+receive and address reported violations of the code of conduct. They will then
+work with a committee consisting of representatives from the Open Source
+Programs Office and the Google Open Source Strategy team. If for any reason you
+are uncomfortable reaching out to the Project Steward, please email
+opensource@google.com.
+
+We will investigate every complaint, but you may not receive a direct response.
+We will use our discretion in determining when and how to follow up on reported
+incidents, which may range from not taking action to permanent expulsion from
+the project and project-sponsored spaces. We will notify the accused of the
+report and provide them an opportunity to discuss it before any action is taken.
+The identity of the reporter will be omitted from the details of the report
+supplied to the accused. In potentially harmful situations, such as ongoing
+harassment or threats to anyone's safety, we may take action without notice.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
+available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
\ No newline at end of file
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 1800a1f1..cdd8c7f3 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests.
.. nox: https://pypi.org/project/nox/
-Note on Editable Installs / Develop Mode
-========================================
-
-- As mentioned previously, using ``setuptools`` in `develop mode`_
- or a ``pip`` `editable install`_ is not possible with this
- library. This is because this library uses `namespace packages`_.
- For context see `Issue #2316`_ and the relevant `PyPA issue`_.
-
- Since ``editable`` / ``develop`` mode can't be used, packages
- need to be installed directly. Hence your changes to the source
- tree don't get incorporated into the **already installed**
- package.
-
-.. _namespace packages: https://www.python.org/dev/peps/pep-0420/
-.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316
-.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12
-.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode
-.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs
-
*****************************************
I'm getting weird errors... Can you help?
*****************************************
diff --git a/docs/conf.py b/docs/conf.py
index 015d055f..01be52f5 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.6.3"
+needs_sphinx = "1.5.5"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -39,6 +39,7 @@
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
+ "sphinx.ext.doctest",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
@@ -348,6 +349,7 @@
"google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
"google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
"grpc": ("https://grpc.io/grpc/python/", None),
+ "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
}
diff --git a/docs/datacatalog_v1/types.rst b/docs/datacatalog_v1/types.rst
index cb94a5e5..19f12ef8 100644
--- a/docs/datacatalog_v1/types.rst
+++ b/docs/datacatalog_v1/types.rst
@@ -3,3 +3,4 @@ Types for Google Cloud Datacatalog v1 API
.. automodule:: google.cloud.datacatalog_v1.types
:members:
+ :show-inheritance:
diff --git a/docs/datacatalog_v1beta1/types.rst b/docs/datacatalog_v1beta1/types.rst
index 75ee2bb4..a1baedaf 100644
--- a/docs/datacatalog_v1beta1/types.rst
+++ b/docs/datacatalog_v1beta1/types.rst
@@ -3,3 +3,4 @@ Types for Google Cloud Datacatalog v1beta1 API
.. automodule:: google.cloud.datacatalog_v1beta1.types
:members:
+ :show-inheritance:
diff --git a/google/cloud/datacatalog_v1/services/data_catalog/async_client.py b/google/cloud/datacatalog_v1/services/data_catalog/async_client.py
index 84dac12f..e5cb1dbf 100644
--- a/google/cloud/datacatalog_v1/services/data_catalog/async_client.py
+++ b/google/cloud/datacatalog_v1/services/data_catalog/async_client.py
@@ -41,7 +41,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-from .transports.base import DataCatalogTransport
+from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport
from .client import DataCatalogClient
@@ -56,19 +56,56 @@ class DataCatalogAsyncClient:
DEFAULT_ENDPOINT = DataCatalogClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = DataCatalogClient.DEFAULT_MTLS_ENDPOINT
+ entry_path = staticmethod(DataCatalogClient.entry_path)
+ parse_entry_path = staticmethod(DataCatalogClient.parse_entry_path)
+ entry_group_path = staticmethod(DataCatalogClient.entry_group_path)
+ parse_entry_group_path = staticmethod(DataCatalogClient.parse_entry_group_path)
+ tag_path = staticmethod(DataCatalogClient.tag_path)
+ parse_tag_path = staticmethod(DataCatalogClient.parse_tag_path)
+ tag_template_path = staticmethod(DataCatalogClient.tag_template_path)
+ parse_tag_template_path = staticmethod(DataCatalogClient.parse_tag_template_path)
tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path)
+ parse_tag_template_field_path = staticmethod(
+ DataCatalogClient.parse_tag_template_field_path
+ )
- tag_template_path = staticmethod(DataCatalogClient.tag_template_path)
+ common_billing_account_path = staticmethod(
+ DataCatalogClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ DataCatalogClient.parse_common_billing_account_path
+ )
- entry_path = staticmethod(DataCatalogClient.entry_path)
+ common_folder_path = staticmethod(DataCatalogClient.common_folder_path)
+ parse_common_folder_path = staticmethod(DataCatalogClient.parse_common_folder_path)
- tag_path = staticmethod(DataCatalogClient.tag_path)
+ common_organization_path = staticmethod(DataCatalogClient.common_organization_path)
+ parse_common_organization_path = staticmethod(
+ DataCatalogClient.parse_common_organization_path
+ )
- entry_group_path = staticmethod(DataCatalogClient.entry_group_path)
+ common_project_path = staticmethod(DataCatalogClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ DataCatalogClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(DataCatalogClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ DataCatalogClient.parse_common_location_path
+ )
from_service_account_file = DataCatalogClient.from_service_account_file
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> DataCatalogTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ DataCatalogTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
get_transport_class = functools.partial(
type(DataCatalogClient).get_transport_class, type(DataCatalogClient)
)
@@ -79,6 +116,7 @@ def __init__(
credentials: credentials.Credentials = None,
transport: Union[str, DataCatalogTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the data catalog client.
@@ -94,16 +132,19 @@ def __init__(
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -111,7 +152,10 @@ def __init__(
"""
self._client = DataCatalogClient(
- credentials=credentials, transport=transport, client_options=client_options,
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
)
async def search_catalog(
@@ -192,7 +236,8 @@ async def search_catalog(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([scope, query]):
+ has_flattened_params = any([scope, query])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -219,7 +264,7 @@ async def search_catalog(
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
@@ -315,7 +360,8 @@ async def create_entry_group(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, entry_group_id, entry_group]):
+ has_flattened_params = any([parent, entry_group_id, entry_group])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -338,7 +384,7 @@ async def create_entry_group(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_entry_group,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -398,7 +444,8 @@ async def get_entry_group(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name, read_mask]):
+ has_flattened_params = any([name, read_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -425,7 +472,7 @@ async def get_entry_group(
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -490,7 +537,8 @@ async def update_entry_group(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([entry_group, update_mask]):
+ has_flattened_params = any([entry_group, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -511,7 +559,7 @@ async def update_entry_group(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_entry_group,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -564,7 +612,8 @@ async def delete_entry_group(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -583,7 +632,7 @@ async def delete_entry_group(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_entry_group,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -640,7 +689,8 @@ async def list_entry_groups(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -665,7 +715,7 @@ async def list_entry_groups(
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -759,7 +809,8 @@ async def create_entry(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, entry_id, entry]):
+ has_flattened_params = any([parent, entry_id, entry])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -782,7 +833,7 @@ async def create_entry(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_entry,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -878,7 +929,8 @@ async def update_entry(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([entry, update_mask]):
+ has_flattened_params = any([entry, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -899,7 +951,7 @@ async def update_entry(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_entry,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -954,7 +1006,8 @@ async def delete_entry(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -973,7 +1026,7 @@ async def delete_entry(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_entry,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1034,7 +1087,8 @@ async def get_entry(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1059,7 +1113,7 @@ async def get_entry(
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1128,7 +1182,7 @@ async def lookup_entry(
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
@@ -1179,7 +1233,8 @@ async def list_entries(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1204,7 +1259,7 @@ async def list_entries(
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1291,7 +1346,8 @@ async def create_tag_template(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, tag_template_id, tag_template]):
+ has_flattened_params = any([parent, tag_template_id, tag_template])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1314,7 +1370,7 @@ async def create_tag_template(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_tag_template,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1374,7 +1430,8 @@ async def get_tag_template(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1393,7 +1450,7 @@ async def get_tag_template(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_tag_template,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1474,7 +1531,8 @@ async def update_tag_template(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([tag_template, update_mask]):
+ has_flattened_params = any([tag_template, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1495,7 +1553,7 @@ async def update_tag_template(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_tag_template,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1558,7 +1616,8 @@ async def delete_tag_template(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name, force]):
+ has_flattened_params = any([name, force])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1579,7 +1638,7 @@ async def delete_tag_template(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_tag_template,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1657,9 +1716,8 @@ async def create_tag_template_field(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any(
- [parent, tag_template_field_id, tag_template_field]
- ):
+ has_flattened_params = any([parent, tag_template_field_id, tag_template_field])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1682,7 +1740,7 @@ async def create_tag_template_field(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1766,7 +1824,8 @@ async def update_tag_template_field(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name, tag_template_field, update_mask]):
+ has_flattened_params = any([name, tag_template_field, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1789,7 +1848,7 @@ async def update_tag_template_field(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1853,7 +1912,8 @@ async def rename_tag_template_field(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name, new_tag_template_field_id]):
+ has_flattened_params = any([name, new_tag_template_field_id])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1874,7 +1934,7 @@ async def rename_tag_template_field(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.rename_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1936,7 +1996,8 @@ async def delete_tag_template_field(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name, force]):
+ has_flattened_params = any([name, force])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1957,7 +2018,7 @@ async def delete_tag_template_field(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2030,7 +2091,8 @@ async def create_tag(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, tag]):
+ has_flattened_params = any([parent, tag])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -2051,7 +2113,7 @@ async def create_tag(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_tag,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2117,7 +2179,8 @@ async def update_tag(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([tag, update_mask]):
+ has_flattened_params = any([tag, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -2138,7 +2201,7 @@ async def update_tag(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_tag,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2185,7 +2248,8 @@ async def delete_tag(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -2204,7 +2268,7 @@ async def delete_tag(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_tag,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2265,7 +2329,8 @@ async def list_tags(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -2290,7 +2355,7 @@ async def list_tags(
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2429,7 +2494,8 @@ async def set_iam_policy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([resource]):
+ has_flattened_params = any([resource])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -2441,20 +2507,14 @@ async def set_iam_policy(
request = iam_policy.SetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.SetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.SetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.set_iam_policy,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2591,7 +2651,8 @@ async def get_iam_policy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([resource]):
+ has_flattened_params = any([resource])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -2603,13 +2664,7 @@ async def get_iam_policy(
request = iam_policy.GetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.GetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.GetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -2622,7 +2677,7 @@ async def get_iam_policy(
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2687,7 +2742,7 @@ async def test_iam_permissions(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.test_iam_permissions,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2704,13 +2759,13 @@ async def test_iam_permissions(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-datacatalog",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("DataCatalogAsyncClient",)
diff --git a/google/cloud/datacatalog_v1/services/data_catalog/client.py b/google/cloud/datacatalog_v1/services/data_catalog/client.py
index b0e61bf2..dae42e6a 100644
--- a/google/cloud/datacatalog_v1/services/data_catalog/client.py
+++ b/google/cloud/datacatalog_v1/services/data_catalog/client.py
@@ -16,17 +16,19 @@
#
from collections import OrderedDict
+from distutils import util
import os
import re
-from typing import Callable, Dict, Sequence, Tuple, Type, Union
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
-import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
@@ -43,7 +45,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-from .transports.base import DataCatalogTransport
+from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import DataCatalogGrpcTransport
from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport
@@ -138,6 +140,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> DataCatalogTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ DataCatalogTransport: The transport used by the client instance.
+ """
+ return self._transport
+
@staticmethod
def entry_path(project: str, location: str, entry_group: str, entry: str,) -> str:
"""Return a fully-qualified entry string."""
@@ -226,12 +237,72 @@ def parse_tag_template_field_path(path: str) -> Dict[str, str]:
)
return m.groupdict() if m else {}
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
- transport: Union[str, DataCatalogTransport] = None,
- client_options: ClientOptions = None,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, DataCatalogTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the data catalog client.
@@ -244,48 +315,74 @@ def __init__(
transport (Union[str, ~.DataCatalogTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client. It
- won't take effect if a ``transport`` instance is provided.
+ client_options (client_options_lib.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
- client_options = ClientOptions.from_dict(client_options)
+ client_options = client_options_lib.from_dict(client_options)
if client_options is None:
- client_options = ClientOptions.ClientOptions()
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
- if client_options.api_endpoint is None:
- use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never")
+ ssl_credentials = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ import grpc # type: ignore
+
+ cert, key = client_options.client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ is_mtls = True
+ else:
+ creds = SslCredentials()
+ is_mtls = creds.is_mtls
+ ssl_credentials = creds.ssl_credentials if is_mtls else None
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
- client_options.api_endpoint = self.DEFAULT_ENDPOINT
+ api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
- client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
- has_client_cert_source = (
- client_options.client_cert_source is not None
- or mtls.has_default_client_cert_source()
- )
- client_options.api_endpoint = (
- self.DEFAULT_MTLS_ENDPOINT
- if has_client_cert_source
- else self.DEFAULT_ENDPOINT
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always"
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
@@ -309,11 +406,11 @@ def __init__(
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
- host=client_options.api_endpoint,
+ host=api_endpoint,
scopes=client_options.scopes,
- api_mtls_endpoint=client_options.api_endpoint,
- client_cert_source=client_options.client_cert_source,
+ ssl_channel_credentials=ssl_credentials,
quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
)
def search_catalog(
@@ -2655,13 +2752,7 @@ def set_iam_policy(
request = iam_policy.SetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.SetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.SetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -2814,13 +2905,7 @@ def get_iam_policy(
request = iam_policy.GetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.GetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.GetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -2901,13 +2986,13 @@ def test_iam_permissions(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-datacatalog",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("DataCatalogClient",)
diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py
index 326b640d..7a2f3159 100644
--- a/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py
+++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/base.py
@@ -19,7 +19,7 @@
import typing
import pkg_resources
-from google import auth
+from google import auth # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
@@ -33,13 +33,13 @@
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-datacatalog",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class DataCatalogTransport(abc.ABC):
@@ -55,6 +55,7 @@ def __init__(
credentials_file: typing.Optional[str] = None,
scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
**kwargs,
) -> None:
"""Instantiate the transport.
@@ -72,6 +73,11 @@ def __init__(
scope (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
@@ -99,9 +105,9 @@ def __init__(
self._credentials = credentials
# Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages()
+ self._prep_wrapped_messages(client_info)
- def _prep_wrapped_messages(self):
+ def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.search_catalog: gapic_v1.method.wrap_method(
@@ -113,10 +119,10 @@ def _prep_wrapped_messages(self):
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.create_entry_group: gapic_v1.method.wrap_method(
- self.create_entry_group, default_timeout=None, client_info=_client_info,
+ self.create_entry_group, default_timeout=None, client_info=client_info,
),
self.get_entry_group: gapic_v1.method.wrap_method(
self.get_entry_group,
@@ -127,13 +133,13 @@ def _prep_wrapped_messages(self):
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.update_entry_group: gapic_v1.method.wrap_method(
- self.update_entry_group, default_timeout=None, client_info=_client_info,
+ self.update_entry_group, default_timeout=None, client_info=client_info,
),
self.delete_entry_group: gapic_v1.method.wrap_method(
- self.delete_entry_group, default_timeout=None, client_info=_client_info,
+ self.delete_entry_group, default_timeout=None, client_info=client_info,
),
self.list_entry_groups: gapic_v1.method.wrap_method(
self.list_entry_groups,
@@ -144,16 +150,16 @@ def _prep_wrapped_messages(self):
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.create_entry: gapic_v1.method.wrap_method(
- self.create_entry, default_timeout=None, client_info=_client_info,
+ self.create_entry, default_timeout=None, client_info=client_info,
),
self.update_entry: gapic_v1.method.wrap_method(
- self.update_entry, default_timeout=None, client_info=_client_info,
+ self.update_entry, default_timeout=None, client_info=client_info,
),
self.delete_entry: gapic_v1.method.wrap_method(
- self.delete_entry, default_timeout=None, client_info=_client_info,
+ self.delete_entry, default_timeout=None, client_info=client_info,
),
self.get_entry: gapic_v1.method.wrap_method(
self.get_entry,
@@ -164,7 +170,7 @@ def _prep_wrapped_messages(self):
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.lookup_entry: gapic_v1.method.wrap_method(
self.lookup_entry,
@@ -175,7 +181,7 @@ def _prep_wrapped_messages(self):
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.list_entries: gapic_v1.method.wrap_method(
self.list_entries,
@@ -186,54 +192,48 @@ def _prep_wrapped_messages(self):
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.create_tag_template: gapic_v1.method.wrap_method(
- self.create_tag_template,
- default_timeout=None,
- client_info=_client_info,
+ self.create_tag_template, default_timeout=None, client_info=client_info,
),
self.get_tag_template: gapic_v1.method.wrap_method(
- self.get_tag_template, default_timeout=None, client_info=_client_info,
+ self.get_tag_template, default_timeout=None, client_info=client_info,
),
self.update_tag_template: gapic_v1.method.wrap_method(
- self.update_tag_template,
- default_timeout=None,
- client_info=_client_info,
+ self.update_tag_template, default_timeout=None, client_info=client_info,
),
self.delete_tag_template: gapic_v1.method.wrap_method(
- self.delete_tag_template,
- default_timeout=None,
- client_info=_client_info,
+ self.delete_tag_template, default_timeout=None, client_info=client_info,
),
self.create_tag_template_field: gapic_v1.method.wrap_method(
self.create_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=client_info,
),
self.update_tag_template_field: gapic_v1.method.wrap_method(
self.update_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=client_info,
),
self.rename_tag_template_field: gapic_v1.method.wrap_method(
self.rename_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=client_info,
),
self.delete_tag_template_field: gapic_v1.method.wrap_method(
self.delete_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=client_info,
),
self.create_tag: gapic_v1.method.wrap_method(
- self.create_tag, default_timeout=None, client_info=_client_info,
+ self.create_tag, default_timeout=None, client_info=client_info,
),
self.update_tag: gapic_v1.method.wrap_method(
- self.update_tag, default_timeout=None, client_info=_client_info,
+ self.update_tag, default_timeout=None, client_info=client_info,
),
self.delete_tag: gapic_v1.method.wrap_method(
- self.delete_tag, default_timeout=None, client_info=_client_info,
+ self.delete_tag, default_timeout=None, client_info=client_info,
),
self.list_tags: gapic_v1.method.wrap_method(
self.list_tags,
@@ -244,10 +244,10 @@ def _prep_wrapped_messages(self):
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.set_iam_policy: gapic_v1.method.wrap_method(
- self.set_iam_policy, default_timeout=None, client_info=_client_info,
+ self.set_iam_policy, default_timeout=None, client_info=client_info,
),
self.get_iam_policy: gapic_v1.method.wrap_method(
self.get_iam_policy,
@@ -258,12 +258,12 @@ def _prep_wrapped_messages(self):
predicate=retries.if_exception_type(exceptions.ServiceUnavailable,),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.test_iam_permissions: gapic_v1.method.wrap_method(
self.test_iam_permissions,
default_timeout=None,
- client_info=_client_info,
+ client_info=client_info,
),
}
diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py
index 9de2ca50..9150d2ac 100644
--- a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py
+++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc.py
@@ -15,14 +15,15 @@
# limitations under the License.
#
+import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
+from google.api_core import gapic_v1 # type: ignore
from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
-
import grpc # type: ignore
from google.cloud.datacatalog_v1.types import datacatalog
@@ -31,7 +32,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import empty_pb2 as empty # type: ignore
-from .base import DataCatalogTransport
+from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO
class DataCatalogGrpcTransport(DataCatalogTransport):
@@ -60,7 +61,9 @@ def __init__(
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
- quota_project_id: Optional[str] = None
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -79,16 +82,23 @@ def __init__(
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -96,6 +106,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -103,7 +115,13 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -134,6 +152,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
self._stubs = {} # type: Dict[str, Callable]
@@ -144,6 +180,7 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
@classmethod
@@ -154,7 +191,7 @@ def create_channel(
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
- **kwargs
+ **kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
@@ -188,24 +225,13 @@ def create_channel(
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
- **kwargs
+ **kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
- # Return the channel from cache.
return self._grpc_channel
@property
diff --git a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py
index 24fdb5c9..49b84e3a 100644
--- a/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py
+++ b/google/cloud/datacatalog_v1/services/data_catalog/transports/grpc_asyncio.py
@@ -15,9 +15,12 @@
# limitations under the License.
#
+import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
+from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
@@ -30,7 +33,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import empty_pb2 as empty # type: ignore
-from .base import DataCatalogTransport
+from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO
from .grpc import DataCatalogGrpcTransport
@@ -102,7 +105,9 @@ def __init__(
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -122,16 +127,23 @@ def __init__(
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -139,6 +151,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -146,13 +160,24 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
@@ -172,6 +197,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
# Run the base constructor.
super().__init__(
@@ -180,6 +223,7 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
self._stubs = {}
@@ -191,13 +235,6 @@ def grpc_channel(self) -> aio.Channel:
This property caches on the instance; repeated calls return
the same channel.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
# Return the channel from cache.
return self._grpc_channel
diff --git a/google/cloud/datacatalog_v1/types/datacatalog.py b/google/cloud/datacatalog_v1/types/datacatalog.py
index fdb31546..a02ed993 100644
--- a/google/cloud/datacatalog_v1/types/datacatalog.py
+++ b/google/cloud/datacatalog_v1/types/datacatalog.py
@@ -537,7 +537,7 @@ class Entry(proto.Message):
Output only when Entry is of type in the EntryType enum. For
entries with user_specified_type, this field is optional and
defaults to an empty string.
- type (~.datacatalog.EntryType):
+ type_ (~.datacatalog.EntryType):
The type of the entry.
Only used for Entries with types in the
EntryType enum.
@@ -601,7 +601,7 @@ class Entry(proto.Message):
linked_resource = proto.Field(proto.STRING, number=9)
- type = proto.Field(proto.ENUM, number=2, oneof="entry_type", enum="EntryType",)
+ type_ = proto.Field(proto.ENUM, number=2, oneof="entry_type", enum="EntryType",)
user_specified_type = proto.Field(proto.STRING, number=16, oneof="entry_type")
@@ -1034,7 +1034,7 @@ class ListEntriesResponse(proto.Message):
def raw_page(self):
return self
- entries = proto.RepeatedField(proto.MESSAGE, number=1, message=Entry,)
+ entries = proto.RepeatedField(proto.MESSAGE, number=1, message="Entry",)
next_page_token = proto.Field(proto.STRING, number=2)
diff --git a/google/cloud/datacatalog_v1/types/schema.py b/google/cloud/datacatalog_v1/types/schema.py
index 4a51a122..98560462 100644
--- a/google/cloud/datacatalog_v1/types/schema.py
+++ b/google/cloud/datacatalog_v1/types/schema.py
@@ -42,7 +42,7 @@ class ColumnSchema(proto.Message):
Attributes:
column (str):
Required. Name of the column.
- type (str):
+ type_ (str):
Required. Type of the column.
description (str):
Optional. Description of the column. Default
@@ -59,7 +59,7 @@ class ColumnSchema(proto.Message):
column = proto.Field(proto.STRING, number=6)
- type = proto.Field(proto.STRING, number=1)
+ type_ = proto.Field(proto.STRING, number=1)
description = proto.Field(proto.STRING, number=2)
diff --git a/google/cloud/datacatalog_v1/types/tags.py b/google/cloud/datacatalog_v1/types/tags.py
index 8e6e94e0..4ef4efa7 100644
--- a/google/cloud/datacatalog_v1/types/tags.py
+++ b/google/cloud/datacatalog_v1/types/tags.py
@@ -202,7 +202,7 @@ class TagTemplateField(proto.Message):
display_name (str):
The display name for this field. Defaults to
an empty string.
- type (~.tags.FieldType):
+ type_ (~.tags.FieldType):
Required. The type of value this tag field
can contain.
is_required (bool):
@@ -221,7 +221,7 @@ class TagTemplateField(proto.Message):
display_name = proto.Field(proto.STRING, number=1)
- type = proto.Field(proto.MESSAGE, number=2, message="FieldType",)
+ type_ = proto.Field(proto.MESSAGE, number=2, message="FieldType",)
is_required = proto.Field(proto.BOOL, number=3)
diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py
index ee21855f..bec3d14c 100644
--- a/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py
+++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/async_client.py
@@ -41,7 +41,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-from .transports.base import DataCatalogTransport
+from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport
from .client import DataCatalogClient
@@ -56,19 +56,56 @@ class DataCatalogAsyncClient:
DEFAULT_ENDPOINT = DataCatalogClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = DataCatalogClient.DEFAULT_MTLS_ENDPOINT
+ entry_path = staticmethod(DataCatalogClient.entry_path)
+ parse_entry_path = staticmethod(DataCatalogClient.parse_entry_path)
+ entry_group_path = staticmethod(DataCatalogClient.entry_group_path)
+ parse_entry_group_path = staticmethod(DataCatalogClient.parse_entry_group_path)
+ tag_path = staticmethod(DataCatalogClient.tag_path)
+ parse_tag_path = staticmethod(DataCatalogClient.parse_tag_path)
tag_template_path = staticmethod(DataCatalogClient.tag_template_path)
+ parse_tag_template_path = staticmethod(DataCatalogClient.parse_tag_template_path)
+ tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path)
+ parse_tag_template_field_path = staticmethod(
+ DataCatalogClient.parse_tag_template_field_path
+ )
- entry_path = staticmethod(DataCatalogClient.entry_path)
+ common_billing_account_path = staticmethod(
+ DataCatalogClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ DataCatalogClient.parse_common_billing_account_path
+ )
- entry_group_path = staticmethod(DataCatalogClient.entry_group_path)
+ common_folder_path = staticmethod(DataCatalogClient.common_folder_path)
+ parse_common_folder_path = staticmethod(DataCatalogClient.parse_common_folder_path)
- tag_template_field_path = staticmethod(DataCatalogClient.tag_template_field_path)
+ common_organization_path = staticmethod(DataCatalogClient.common_organization_path)
+ parse_common_organization_path = staticmethod(
+ DataCatalogClient.parse_common_organization_path
+ )
- tag_path = staticmethod(DataCatalogClient.tag_path)
+ common_project_path = staticmethod(DataCatalogClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ DataCatalogClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(DataCatalogClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ DataCatalogClient.parse_common_location_path
+ )
from_service_account_file = DataCatalogClient.from_service_account_file
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> DataCatalogTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ DataCatalogTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
get_transport_class = functools.partial(
type(DataCatalogClient).get_transport_class, type(DataCatalogClient)
)
@@ -79,6 +116,7 @@ def __init__(
credentials: credentials.Credentials = None,
transport: Union[str, DataCatalogTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the data catalog client.
@@ -94,16 +132,19 @@ def __init__(
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -111,7 +152,10 @@ def __init__(
"""
self._client = DataCatalogClient(
- credentials=credentials, transport=transport, client_options=client_options,
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
)
async def search_catalog(
@@ -192,7 +236,8 @@ async def search_catalog(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([scope, query]):
+ has_flattened_params = any([scope, query])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -213,7 +258,7 @@ async def search_catalog(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.search_catalog,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
@@ -297,7 +342,8 @@ async def create_entry_group(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, entry_group_id, entry_group]):
+ has_flattened_params = any([parent, entry_group_id, entry_group])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -320,7 +366,7 @@ async def create_entry_group(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_entry_group,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -386,7 +432,8 @@ async def update_entry_group(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([entry_group, update_mask]):
+ has_flattened_params = any([entry_group, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -407,7 +454,7 @@ async def update_entry_group(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_entry_group,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -470,7 +517,8 @@ async def get_entry_group(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name, read_mask]):
+ has_flattened_params = any([name, read_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -499,7 +547,7 @@ async def get_entry_group(
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -550,7 +598,8 @@ async def delete_entry_group(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -577,7 +626,7 @@ async def delete_entry_group(
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -634,7 +683,8 @@ async def list_entry_groups(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -653,7 +703,7 @@ async def list_entry_groups(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_entry_groups,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -747,7 +797,8 @@ async def create_entry(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, entry_id, entry]):
+ has_flattened_params = any([parent, entry_id, entry])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -770,7 +821,7 @@ async def create_entry(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_entry,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -866,7 +917,8 @@ async def update_entry(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([entry, update_mask]):
+ has_flattened_params = any([entry, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -887,7 +939,7 @@ async def update_entry(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_entry,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -942,7 +994,8 @@ async def delete_entry(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -969,7 +1022,7 @@ async def delete_entry(
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1030,7 +1083,8 @@ async def get_entry(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1057,7 +1111,7 @@ async def get_entry(
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1128,7 +1182,7 @@ async def lookup_entry(
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
@@ -1179,7 +1233,8 @@ async def list_entries(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1198,7 +1253,7 @@ async def list_entries(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_entries,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1285,7 +1340,8 @@ async def create_tag_template(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, tag_template_id, tag_template]):
+ has_flattened_params = any([parent, tag_template_id, tag_template])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1308,7 +1364,7 @@ async def create_tag_template(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_tag_template,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1368,7 +1424,8 @@ async def get_tag_template(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1395,7 +1452,7 @@ async def get_tag_template(
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1476,7 +1533,8 @@ async def update_tag_template(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([tag_template, update_mask]):
+ has_flattened_params = any([tag_template, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1497,7 +1555,7 @@ async def update_tag_template(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_tag_template,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1560,7 +1618,8 @@ async def delete_tag_template(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name, force]):
+ has_flattened_params = any([name, force])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1589,7 +1648,7 @@ async def delete_tag_template(
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1667,9 +1726,8 @@ async def create_tag_template_field(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any(
- [parent, tag_template_field_id, tag_template_field]
- ):
+ has_flattened_params = any([parent, tag_template_field_id, tag_template_field])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1692,7 +1750,7 @@ async def create_tag_template_field(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1776,7 +1834,8 @@ async def update_tag_template_field(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name, tag_template_field, update_mask]):
+ has_flattened_params = any([name, tag_template_field, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1799,7 +1858,7 @@ async def update_tag_template_field(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1863,7 +1922,8 @@ async def rename_tag_template_field(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name, new_tag_template_field_id]):
+ has_flattened_params = any([name, new_tag_template_field_id])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1884,7 +1944,7 @@ async def rename_tag_template_field(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.rename_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1946,7 +2006,8 @@ async def delete_tag_template_field(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name, force]):
+ has_flattened_params = any([name, force])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -1975,7 +2036,7 @@ async def delete_tag_template_field(
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2049,7 +2110,8 @@ async def create_tag(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, tag]):
+ has_flattened_params = any([parent, tag])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -2070,7 +2132,7 @@ async def create_tag(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_tag,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2136,7 +2198,8 @@ async def update_tag(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([tag, update_mask]):
+ has_flattened_params = any([tag, update_mask])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -2157,7 +2220,7 @@ async def update_tag(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_tag,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2204,7 +2267,8 @@ async def delete_tag(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -2231,7 +2295,7 @@ async def delete_tag(
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2293,7 +2357,8 @@ async def list_tags(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -2320,7 +2385,7 @@ async def list_tags(
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2459,7 +2524,8 @@ async def set_iam_policy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([resource]):
+ has_flattened_params = any([resource])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -2471,20 +2537,14 @@ async def set_iam_policy(
request = iam_policy.SetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.SetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.SetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.set_iam_policy,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2621,7 +2681,8 @@ async def get_iam_policy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([resource]):
+ has_flattened_params = any([resource])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -2633,20 +2694,14 @@ async def get_iam_policy(
request = iam_policy.GetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.GetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.GetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_iam_policy,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2711,7 +2766,7 @@ async def test_iam_permissions(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.test_iam_permissions,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -2728,13 +2783,13 @@ async def test_iam_permissions(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-datacatalog",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("DataCatalogAsyncClient",)
diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py
index 08b4f5b7..da6b34fe 100644
--- a/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py
+++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/client.py
@@ -16,17 +16,19 @@
#
from collections import OrderedDict
+from distutils import util
import os
import re
-from typing import Callable, Dict, Sequence, Tuple, Type, Union
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
-import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
@@ -43,7 +45,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import field_mask_pb2 as field_mask # type: ignore
-from .transports.base import DataCatalogTransport
+from .transports.base import DataCatalogTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import DataCatalogGrpcTransport
from .transports.grpc_asyncio import DataCatalogGrpcAsyncIOTransport
@@ -138,6 +140,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> DataCatalogTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ DataCatalogTransport: The transport used by the client instance.
+ """
+ return self._transport
+
@staticmethod
def entry_path(project: str, location: str, entry_group: str, entry: str,) -> str:
"""Return a fully-qualified entry string."""
@@ -226,12 +237,72 @@ def parse_tag_template_field_path(path: str) -> Dict[str, str]:
)
return m.groupdict() if m else {}
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
- transport: Union[str, DataCatalogTransport] = None,
- client_options: ClientOptions = None,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, DataCatalogTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the data catalog client.
@@ -244,48 +315,74 @@ def __init__(
transport (Union[str, ~.DataCatalogTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client. It
- won't take effect if a ``transport`` instance is provided.
+ client_options (client_options_lib.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
- client_options = ClientOptions.from_dict(client_options)
+ client_options = client_options_lib.from_dict(client_options)
if client_options is None:
- client_options = ClientOptions.ClientOptions()
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
- if client_options.api_endpoint is None:
- use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never")
+ ssl_credentials = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ import grpc # type: ignore
+
+ cert, key = client_options.client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ is_mtls = True
+ else:
+ creds = SslCredentials()
+ is_mtls = creds.is_mtls
+ ssl_credentials = creds.ssl_credentials if is_mtls else None
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
- client_options.api_endpoint = self.DEFAULT_ENDPOINT
+ api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
- client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
- has_client_cert_source = (
- client_options.client_cert_source is not None
- or mtls.has_default_client_cert_source()
- )
- client_options.api_endpoint = (
- self.DEFAULT_MTLS_ENDPOINT
- if has_client_cert_source
- else self.DEFAULT_ENDPOINT
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always"
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
@@ -309,11 +406,11 @@ def __init__(
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
- host=client_options.api_endpoint,
+ host=api_endpoint,
scopes=client_options.scopes,
- api_mtls_endpoint=client_options.api_endpoint,
- client_cert_source=client_options.client_cert_source,
+ ssl_channel_credentials=ssl_credentials,
quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
)
def search_catalog(
@@ -2647,13 +2744,7 @@ def set_iam_policy(
request = iam_policy.SetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.SetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.SetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -2806,13 +2897,7 @@ def get_iam_policy(
request = iam_policy.GetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.GetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.GetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -2893,13 +2978,13 @@ def test_iam_permissions(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-datacatalog",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("DataCatalogClient",)
diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py
index fac99233..38de8373 100644
--- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py
+++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/base.py
@@ -19,7 +19,7 @@
import typing
import pkg_resources
-from google import auth
+from google import auth # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
@@ -33,13 +33,13 @@
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-datacatalog",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class DataCatalogTransport(abc.ABC):
@@ -55,6 +55,7 @@ def __init__(
credentials_file: typing.Optional[str] = None,
scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
**kwargs,
) -> None:
"""Instantiate the transport.
@@ -72,6 +73,11 @@ def __init__(
scope (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
@@ -99,19 +105,19 @@ def __init__(
self._credentials = credentials
# Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages()
+ self._prep_wrapped_messages(client_info)
- def _prep_wrapped_messages(self):
+ def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.search_catalog: gapic_v1.method.wrap_method(
- self.search_catalog, default_timeout=None, client_info=_client_info,
+ self.search_catalog, default_timeout=None, client_info=client_info,
),
self.create_entry_group: gapic_v1.method.wrap_method(
- self.create_entry_group, default_timeout=None, client_info=_client_info,
+ self.create_entry_group, default_timeout=None, client_info=client_info,
),
self.update_entry_group: gapic_v1.method.wrap_method(
- self.update_entry_group, default_timeout=None, client_info=_client_info,
+ self.update_entry_group, default_timeout=None, client_info=client_info,
),
self.get_entry_group: gapic_v1.method.wrap_method(
self.get_entry_group,
@@ -124,7 +130,7 @@ def _prep_wrapped_messages(self):
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.delete_entry_group: gapic_v1.method.wrap_method(
self.delete_entry_group,
@@ -137,16 +143,16 @@ def _prep_wrapped_messages(self):
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.list_entry_groups: gapic_v1.method.wrap_method(
- self.list_entry_groups, default_timeout=None, client_info=_client_info,
+ self.list_entry_groups, default_timeout=None, client_info=client_info,
),
self.create_entry: gapic_v1.method.wrap_method(
- self.create_entry, default_timeout=None, client_info=_client_info,
+ self.create_entry, default_timeout=None, client_info=client_info,
),
self.update_entry: gapic_v1.method.wrap_method(
- self.update_entry, default_timeout=None, client_info=_client_info,
+ self.update_entry, default_timeout=None, client_info=client_info,
),
self.delete_entry: gapic_v1.method.wrap_method(
self.delete_entry,
@@ -159,7 +165,7 @@ def _prep_wrapped_messages(self):
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.get_entry: gapic_v1.method.wrap_method(
self.get_entry,
@@ -172,7 +178,7 @@ def _prep_wrapped_messages(self):
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.lookup_entry: gapic_v1.method.wrap_method(
self.lookup_entry,
@@ -185,15 +191,13 @@ def _prep_wrapped_messages(self):
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.list_entries: gapic_v1.method.wrap_method(
- self.list_entries, default_timeout=None, client_info=_client_info,
+ self.list_entries, default_timeout=None, client_info=client_info,
),
self.create_tag_template: gapic_v1.method.wrap_method(
- self.create_tag_template,
- default_timeout=None,
- client_info=_client_info,
+ self.create_tag_template, default_timeout=None, client_info=client_info,
),
self.get_tag_template: gapic_v1.method.wrap_method(
self.get_tag_template,
@@ -206,12 +210,10 @@ def _prep_wrapped_messages(self):
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.update_tag_template: gapic_v1.method.wrap_method(
- self.update_tag_template,
- default_timeout=None,
- client_info=_client_info,
+ self.update_tag_template, default_timeout=None, client_info=client_info,
),
self.delete_tag_template: gapic_v1.method.wrap_method(
self.delete_tag_template,
@@ -224,22 +226,22 @@ def _prep_wrapped_messages(self):
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.create_tag_template_field: gapic_v1.method.wrap_method(
self.create_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=client_info,
),
self.update_tag_template_field: gapic_v1.method.wrap_method(
self.update_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=client_info,
),
self.rename_tag_template_field: gapic_v1.method.wrap_method(
self.rename_tag_template_field,
default_timeout=None,
- client_info=_client_info,
+ client_info=client_info,
),
self.delete_tag_template_field: gapic_v1.method.wrap_method(
self.delete_tag_template_field,
@@ -252,13 +254,13 @@ def _prep_wrapped_messages(self):
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.create_tag: gapic_v1.method.wrap_method(
- self.create_tag, default_timeout=None, client_info=_client_info,
+ self.create_tag, default_timeout=None, client_info=client_info,
),
self.update_tag: gapic_v1.method.wrap_method(
- self.update_tag, default_timeout=None, client_info=_client_info,
+ self.update_tag, default_timeout=None, client_info=client_info,
),
self.delete_tag: gapic_v1.method.wrap_method(
self.delete_tag,
@@ -271,7 +273,7 @@ def _prep_wrapped_messages(self):
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.list_tags: gapic_v1.method.wrap_method(
self.list_tags,
@@ -284,18 +286,18 @@ def _prep_wrapped_messages(self):
),
),
default_timeout=60.0,
- client_info=_client_info,
+ client_info=client_info,
),
self.set_iam_policy: gapic_v1.method.wrap_method(
- self.set_iam_policy, default_timeout=None, client_info=_client_info,
+ self.set_iam_policy, default_timeout=None, client_info=client_info,
),
self.get_iam_policy: gapic_v1.method.wrap_method(
- self.get_iam_policy, default_timeout=None, client_info=_client_info,
+ self.get_iam_policy, default_timeout=None, client_info=client_info,
),
self.test_iam_permissions: gapic_v1.method.wrap_method(
self.test_iam_permissions,
default_timeout=None,
- client_info=_client_info,
+ client_info=client_info,
),
}
diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py
index 1b96a954..4a34e3f9 100644
--- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py
+++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc.py
@@ -15,14 +15,15 @@
# limitations under the License.
#
+import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
+from google.api_core import gapic_v1 # type: ignore
from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
-
import grpc # type: ignore
from google.cloud.datacatalog_v1beta1.types import datacatalog
@@ -31,7 +32,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import empty_pb2 as empty # type: ignore
-from .base import DataCatalogTransport
+from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO
class DataCatalogGrpcTransport(DataCatalogTransport):
@@ -60,7 +61,9 @@ def __init__(
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
- quota_project_id: Optional[str] = None
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -79,16 +82,23 @@ def __init__(
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -96,6 +106,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -103,7 +115,13 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -134,6 +152,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
self._stubs = {} # type: Dict[str, Callable]
@@ -144,6 +180,7 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
@classmethod
@@ -154,7 +191,7 @@ def create_channel(
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
- **kwargs
+ **kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
@@ -188,24 +225,13 @@ def create_channel(
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
- **kwargs
+ **kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
- # Return the channel from cache.
return self._grpc_channel
@property
diff --git a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py
index 1d7f80fd..b8670aa2 100644
--- a/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py
+++ b/google/cloud/datacatalog_v1beta1/services/data_catalog/transports/grpc_asyncio.py
@@ -15,9 +15,12 @@
# limitations under the License.
#
+import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
+from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
@@ -30,7 +33,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import empty_pb2 as empty # type: ignore
-from .base import DataCatalogTransport
+from .base import DataCatalogTransport, DEFAULT_CLIENT_INFO
from .grpc import DataCatalogGrpcTransport
@@ -102,7 +105,9 @@ def __init__(
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -122,16 +127,23 @@ def __init__(
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -139,6 +151,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -146,13 +160,24 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
@@ -172,6 +197,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
# Run the base constructor.
super().__init__(
@@ -180,6 +223,7 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
self._stubs = {}
@@ -191,13 +235,6 @@ def grpc_channel(self) -> aio.Channel:
This property caches on the instance; repeated calls return
the same channel.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
# Return the channel from cache.
return self._grpc_channel
diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py
index de2eaeea..759d80df 100644
--- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py
+++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/async_client.py
@@ -33,7 +33,7 @@
from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
from google.iam.v1 import policy_pb2 as policy # type: ignore
-from .transports.base import PolicyTagManagerTransport
+from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport
from .client import PolicyTagManagerClient
@@ -48,13 +48,52 @@ class PolicyTagManagerAsyncClient:
DEFAULT_ENDPOINT = PolicyTagManagerClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = PolicyTagManagerClient.DEFAULT_MTLS_ENDPOINT
+ policy_tag_path = staticmethod(PolicyTagManagerClient.policy_tag_path)
+ parse_policy_tag_path = staticmethod(PolicyTagManagerClient.parse_policy_tag_path)
taxonomy_path = staticmethod(PolicyTagManagerClient.taxonomy_path)
+ parse_taxonomy_path = staticmethod(PolicyTagManagerClient.parse_taxonomy_path)
- policy_tag_path = staticmethod(PolicyTagManagerClient.policy_tag_path)
+ common_billing_account_path = staticmethod(
+ PolicyTagManagerClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ PolicyTagManagerClient.parse_common_billing_account_path
+ )
+
+ common_folder_path = staticmethod(PolicyTagManagerClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ PolicyTagManagerClient.parse_common_folder_path
+ )
+
+ common_organization_path = staticmethod(
+ PolicyTagManagerClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ PolicyTagManagerClient.parse_common_organization_path
+ )
+
+ common_project_path = staticmethod(PolicyTagManagerClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ PolicyTagManagerClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(PolicyTagManagerClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ PolicyTagManagerClient.parse_common_location_path
+ )
from_service_account_file = PolicyTagManagerClient.from_service_account_file
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> PolicyTagManagerTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ PolicyTagManagerTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
get_transport_class = functools.partial(
type(PolicyTagManagerClient).get_transport_class, type(PolicyTagManagerClient)
)
@@ -65,6 +104,7 @@ def __init__(
credentials: credentials.Credentials = None,
transport: Union[str, PolicyTagManagerTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the policy tag manager client.
@@ -80,16 +120,19 @@ def __init__(
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -97,7 +140,10 @@ def __init__(
"""
self._client = PolicyTagManagerClient(
- credentials=credentials, transport=transport, client_options=client_options,
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
)
async def create_taxonomy(
@@ -149,7 +195,8 @@ async def create_taxonomy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, taxonomy]):
+ has_flattened_params = any([parent, taxonomy])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -170,7 +217,7 @@ async def create_taxonomy(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_taxonomy,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -219,7 +266,8 @@ async def delete_taxonomy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -238,7 +286,7 @@ async def delete_taxonomy(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_taxonomy,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -294,7 +342,8 @@ async def update_taxonomy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([taxonomy]):
+ has_flattened_params = any([taxonomy])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -313,7 +362,7 @@ async def update_taxonomy(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_taxonomy,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -371,7 +420,8 @@ async def list_taxonomies(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -390,7 +440,7 @@ async def list_taxonomies(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_taxonomies,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -453,7 +503,8 @@ async def get_taxonomy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -472,7 +523,7 @@ async def get_taxonomy(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_taxonomy,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -537,7 +588,8 @@ async def create_policy_tag(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent, policy_tag]):
+ has_flattened_params = any([parent, policy_tag])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -558,7 +610,7 @@ async def create_policy_tag(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.create_policy_tag,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -606,7 +658,8 @@ async def delete_policy_tag(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -625,7 +678,7 @@ async def delete_policy_tag(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.delete_policy_tag,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -683,7 +736,8 @@ async def update_policy_tag(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([policy_tag]):
+ has_flattened_params = any([policy_tag])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -702,7 +756,7 @@ async def update_policy_tag(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.update_policy_tag,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -759,7 +813,8 @@ async def list_policy_tags(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([parent]):
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -778,7 +833,7 @@ async def list_policy_tags(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.list_policy_tags,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -842,7 +897,8 @@ async def get_policy_tag(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([name]):
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -861,7 +917,7 @@ async def get_policy_tag(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_policy_tag,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -978,7 +1034,7 @@ async def get_iam_policy(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_iam_policy,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1095,7 +1151,7 @@ async def set_iam_policy(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.set_iam_policy,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1148,7 +1204,7 @@ async def test_iam_permissions(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.test_iam_permissions,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -1165,13 +1221,13 @@ async def test_iam_permissions(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-datacatalog",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("PolicyTagManagerAsyncClient",)
diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py
index ac3eec4d..ffbb1f7f 100644
--- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py
+++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/client.py
@@ -16,17 +16,19 @@
#
from collections import OrderedDict
+from distutils import util
import os
import re
-from typing import Callable, Dict, Sequence, Tuple, Type, Union
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
-import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
@@ -35,7 +37,7 @@
from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
from google.iam.v1 import policy_pb2 as policy # type: ignore
-from .transports.base import PolicyTagManagerTransport
+from .transports.base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import PolicyTagManagerGrpcTransport
from .transports.grpc_asyncio import PolicyTagManagerGrpcAsyncIOTransport
@@ -132,6 +134,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> PolicyTagManagerTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ PolicyTagManagerTransport: The transport used by the client instance.
+ """
+ return self._transport
+
@staticmethod
def policy_tag_path(
project: str, location: str, taxonomy: str, policy_tag: str,
@@ -169,12 +180,72 @@ def parse_taxonomy_path(path: str) -> Dict[str, str]:
)
return m.groupdict() if m else {}
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
- transport: Union[str, PolicyTagManagerTransport] = None,
- client_options: ClientOptions = None,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, PolicyTagManagerTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the policy tag manager client.
@@ -187,48 +258,74 @@ def __init__(
transport (Union[str, ~.PolicyTagManagerTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client. It
- won't take effect if a ``transport`` instance is provided.
+ client_options (client_options_lib.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
- client_options = ClientOptions.from_dict(client_options)
+ client_options = client_options_lib.from_dict(client_options)
if client_options is None:
- client_options = ClientOptions.ClientOptions()
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
- if client_options.api_endpoint is None:
- use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never")
+ ssl_credentials = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ import grpc # type: ignore
+
+ cert, key = client_options.client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ is_mtls = True
+ else:
+ creds = SslCredentials()
+ is_mtls = creds.is_mtls
+ ssl_credentials = creds.ssl_credentials if is_mtls else None
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
- client_options.api_endpoint = self.DEFAULT_ENDPOINT
+ api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
- client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
- has_client_cert_source = (
- client_options.client_cert_source is not None
- or mtls.has_default_client_cert_source()
- )
- client_options.api_endpoint = (
- self.DEFAULT_MTLS_ENDPOINT
- if has_client_cert_source
- else self.DEFAULT_ENDPOINT
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always"
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
@@ -252,11 +349,11 @@ def __init__(
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
- host=client_options.api_endpoint,
+ host=api_endpoint,
scopes=client_options.scopes,
- api_mtls_endpoint=client_options.api_endpoint,
- client_cert_source=client_options.client_cert_source,
+ ssl_channel_credentials=ssl_credentials,
quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
)
def create_taxonomy(
@@ -1332,13 +1429,13 @@ def test_iam_permissions(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-datacatalog",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("PolicyTagManagerClient",)
diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py
index abca4532..8d5b5e7c 100644
--- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py
+++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/base.py
@@ -19,7 +19,7 @@
import typing
import pkg_resources
-from google import auth
+from google import auth # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
@@ -32,13 +32,13 @@
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-datacatalog",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class PolicyTagManagerTransport(abc.ABC):
@@ -54,6 +54,7 @@ def __init__(
credentials_file: typing.Optional[str] = None,
scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
**kwargs,
) -> None:
"""Instantiate the transport.
@@ -71,6 +72,11 @@ def __init__(
scope (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
@@ -98,51 +104,51 @@ def __init__(
self._credentials = credentials
# Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages()
+ self._prep_wrapped_messages(client_info)
- def _prep_wrapped_messages(self):
+ def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.create_taxonomy: gapic_v1.method.wrap_method(
- self.create_taxonomy, default_timeout=None, client_info=_client_info,
+ self.create_taxonomy, default_timeout=None, client_info=client_info,
),
self.delete_taxonomy: gapic_v1.method.wrap_method(
- self.delete_taxonomy, default_timeout=None, client_info=_client_info,
+ self.delete_taxonomy, default_timeout=None, client_info=client_info,
),
self.update_taxonomy: gapic_v1.method.wrap_method(
- self.update_taxonomy, default_timeout=None, client_info=_client_info,
+ self.update_taxonomy, default_timeout=None, client_info=client_info,
),
self.list_taxonomies: gapic_v1.method.wrap_method(
- self.list_taxonomies, default_timeout=None, client_info=_client_info,
+ self.list_taxonomies, default_timeout=None, client_info=client_info,
),
self.get_taxonomy: gapic_v1.method.wrap_method(
- self.get_taxonomy, default_timeout=None, client_info=_client_info,
+ self.get_taxonomy, default_timeout=None, client_info=client_info,
),
self.create_policy_tag: gapic_v1.method.wrap_method(
- self.create_policy_tag, default_timeout=None, client_info=_client_info,
+ self.create_policy_tag, default_timeout=None, client_info=client_info,
),
self.delete_policy_tag: gapic_v1.method.wrap_method(
- self.delete_policy_tag, default_timeout=None, client_info=_client_info,
+ self.delete_policy_tag, default_timeout=None, client_info=client_info,
),
self.update_policy_tag: gapic_v1.method.wrap_method(
- self.update_policy_tag, default_timeout=None, client_info=_client_info,
+ self.update_policy_tag, default_timeout=None, client_info=client_info,
),
self.list_policy_tags: gapic_v1.method.wrap_method(
- self.list_policy_tags, default_timeout=None, client_info=_client_info,
+ self.list_policy_tags, default_timeout=None, client_info=client_info,
),
self.get_policy_tag: gapic_v1.method.wrap_method(
- self.get_policy_tag, default_timeout=None, client_info=_client_info,
+ self.get_policy_tag, default_timeout=None, client_info=client_info,
),
self.get_iam_policy: gapic_v1.method.wrap_method(
- self.get_iam_policy, default_timeout=None, client_info=_client_info,
+ self.get_iam_policy, default_timeout=None, client_info=client_info,
),
self.set_iam_policy: gapic_v1.method.wrap_method(
- self.set_iam_policy, default_timeout=None, client_info=_client_info,
+ self.set_iam_policy, default_timeout=None, client_info=client_info,
),
self.test_iam_permissions: gapic_v1.method.wrap_method(
self.test_iam_permissions,
default_timeout=None,
- client_info=_client_info,
+ client_info=client_info,
),
}
diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py
index d7fc35f0..ee1e1daa 100644
--- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py
+++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc.py
@@ -15,14 +15,15 @@
# limitations under the License.
#
+import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
+from google.api_core import gapic_v1 # type: ignore
from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
-
import grpc # type: ignore
from google.cloud.datacatalog_v1beta1.types import policytagmanager
@@ -30,7 +31,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import empty_pb2 as empty # type: ignore
-from .base import PolicyTagManagerTransport
+from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO
class PolicyTagManagerGrpcTransport(PolicyTagManagerTransport):
@@ -59,7 +60,9 @@ def __init__(
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
- quota_project_id: Optional[str] = None
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -78,16 +81,23 @@ def __init__(
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -95,6 +105,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -102,7 +114,13 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -133,6 +151,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
self._stubs = {} # type: Dict[str, Callable]
@@ -143,6 +179,7 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
@classmethod
@@ -153,7 +190,7 @@ def create_channel(
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
- **kwargs
+ **kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
@@ -187,24 +224,13 @@ def create_channel(
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
- **kwargs
+ **kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
- # Return the channel from cache.
return self._grpc_channel
@property
diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py
index 217f0a87..71d83118 100644
--- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py
+++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager/transports/grpc_asyncio.py
@@ -15,9 +15,12 @@
# limitations under the License.
#
+import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
+from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
@@ -29,7 +32,7 @@
from google.iam.v1 import policy_pb2 as policy # type: ignore
from google.protobuf import empty_pb2 as empty # type: ignore
-from .base import PolicyTagManagerTransport
+from .base import PolicyTagManagerTransport, DEFAULT_CLIENT_INFO
from .grpc import PolicyTagManagerGrpcTransport
@@ -101,7 +104,9 @@ def __init__(
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -121,16 +126,23 @@ def __init__(
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -138,6 +150,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -145,13 +159,24 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
@@ -171,6 +196,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
# Run the base constructor.
super().__init__(
@@ -179,6 +222,7 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
self._stubs = {}
@@ -190,13 +234,6 @@ def grpc_channel(self) -> aio.Channel:
This property caches on the instance; repeated calls return
the same channel.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
# Return the channel from cache.
return self._grpc_channel
diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py
index 474cc182..cfbd3082 100644
--- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py
+++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/async_client.py
@@ -31,7 +31,7 @@
from google.cloud.datacatalog_v1beta1.types import policytagmanager
from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization
-from .transports.base import PolicyTagManagerSerializationTransport
+from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport
from .client import PolicyTagManagerSerializationClient
@@ -47,11 +47,60 @@ class PolicyTagManagerSerializationAsyncClient:
DEFAULT_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = PolicyTagManagerSerializationClient.DEFAULT_MTLS_ENDPOINT
+ taxonomy_path = staticmethod(PolicyTagManagerSerializationClient.taxonomy_path)
+ parse_taxonomy_path = staticmethod(
+ PolicyTagManagerSerializationClient.parse_taxonomy_path
+ )
+
+ common_billing_account_path = staticmethod(
+ PolicyTagManagerSerializationClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ PolicyTagManagerSerializationClient.parse_common_billing_account_path
+ )
+
+ common_folder_path = staticmethod(
+ PolicyTagManagerSerializationClient.common_folder_path
+ )
+ parse_common_folder_path = staticmethod(
+ PolicyTagManagerSerializationClient.parse_common_folder_path
+ )
+
+ common_organization_path = staticmethod(
+ PolicyTagManagerSerializationClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ PolicyTagManagerSerializationClient.parse_common_organization_path
+ )
+
+ common_project_path = staticmethod(
+ PolicyTagManagerSerializationClient.common_project_path
+ )
+ parse_common_project_path = staticmethod(
+ PolicyTagManagerSerializationClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(
+ PolicyTagManagerSerializationClient.common_location_path
+ )
+ parse_common_location_path = staticmethod(
+ PolicyTagManagerSerializationClient.parse_common_location_path
+ )
+
from_service_account_file = (
PolicyTagManagerSerializationClient.from_service_account_file
)
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> PolicyTagManagerSerializationTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ PolicyTagManagerSerializationTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
get_transport_class = functools.partial(
type(PolicyTagManagerSerializationClient).get_transport_class,
type(PolicyTagManagerSerializationClient),
@@ -63,6 +112,7 @@ def __init__(
credentials: credentials.Credentials = None,
transport: Union[str, PolicyTagManagerSerializationTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the policy tag manager serialization client.
@@ -78,16 +128,19 @@ def __init__(
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -95,7 +148,10 @@ def __init__(
"""
self._client = PolicyTagManagerSerializationClient(
- credentials=credentials, transport=transport, client_options=client_options,
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
)
async def import_taxonomies(
@@ -138,7 +194,7 @@ async def import_taxonomies(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.import_taxonomies,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -193,7 +249,7 @@ async def export_taxonomies(
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.export_taxonomies,
default_timeout=None,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -210,13 +266,13 @@ async def export_taxonomies(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-datacatalog",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("PolicyTagManagerSerializationAsyncClient",)
diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py
index 445d151d..65a709e3 100644
--- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py
+++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/client.py
@@ -16,24 +16,26 @@
#
from collections import OrderedDict
+from distutils import util
import os
import re
-from typing import Callable, Dict, Sequence, Tuple, Type, Union
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
-import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.datacatalog_v1beta1.types import policytagmanager
from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization
-from .transports.base import PolicyTagManagerSerializationTransport
+from .transports.base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import PolicyTagManagerSerializationGrpcTransport
from .transports.grpc_asyncio import PolicyTagManagerSerializationGrpcAsyncIOTransport
@@ -137,12 +139,97 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> PolicyTagManagerSerializationTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ PolicyTagManagerSerializationTransport: The transport used by the client instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def taxonomy_path(project: str, location: str, taxonomy: str,) -> str:
+ """Return a fully-qualified taxonomy string."""
+ return "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(
+ project=project, location=location, taxonomy=taxonomy,
+ )
+
+ @staticmethod
+ def parse_taxonomy_path(path: str) -> Dict[str, str]:
+ """Parse a taxonomy path into its component segments."""
+ m = re.match(
+ r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/taxonomies/(?P<taxonomy>.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse an organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
- transport: Union[str, PolicyTagManagerSerializationTransport] = None,
- client_options: ClientOptions = None,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, PolicyTagManagerSerializationTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the policy tag manager serialization client.
@@ -155,48 +242,74 @@ def __init__(
transport (Union[str, ~.PolicyTagManagerSerializationTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client. It
- won't take effect if a ``transport`` instance is provided.
+ client_options (client_options_lib.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
- client_options = ClientOptions.from_dict(client_options)
+ client_options = client_options_lib.from_dict(client_options)
if client_options is None:
- client_options = ClientOptions.ClientOptions()
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
+
+ ssl_credentials = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ import grpc # type: ignore
- if client_options.api_endpoint is None:
- use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never")
+ cert, key = client_options.client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ is_mtls = True
+ else:
+ creds = SslCredentials()
+ is_mtls = creds.is_mtls
+ ssl_credentials = creds.ssl_credentials if is_mtls else None
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
- client_options.api_endpoint = self.DEFAULT_ENDPOINT
+ api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
- client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
- has_client_cert_source = (
- client_options.client_cert_source is not None
- or mtls.has_default_client_cert_source()
- )
- client_options.api_endpoint = (
- self.DEFAULT_MTLS_ENDPOINT
- if has_client_cert_source
- else self.DEFAULT_ENDPOINT
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always"
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
@@ -220,11 +333,11 @@ def __init__(
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
- host=client_options.api_endpoint,
+ host=api_endpoint,
scopes=client_options.scopes,
- api_mtls_endpoint=client_options.api_endpoint,
- client_cert_source=client_options.client_cert_source,
+ ssl_channel_credentials=ssl_credentials,
quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
)
def import_taxonomies(
@@ -345,13 +458,13 @@ def export_taxonomies(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-datacatalog",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("PolicyTagManagerSerializationClient",)
diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py
index 26360d93..5f5da515 100644
--- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py
+++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/base.py
@@ -19,7 +19,7 @@
import typing
import pkg_resources
-from google import auth
+from google import auth # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
@@ -29,13 +29,13 @@
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-datacatalog",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class PolicyTagManagerSerializationTransport(abc.ABC):
@@ -51,6 +51,7 @@ def __init__(
credentials_file: typing.Optional[str] = None,
scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
**kwargs,
) -> None:
"""Instantiate the transport.
@@ -68,6 +69,11 @@ def __init__(
scope (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
@@ -95,16 +101,16 @@ def __init__(
self._credentials = credentials
# Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages()
+ self._prep_wrapped_messages(client_info)
- def _prep_wrapped_messages(self):
+ def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.import_taxonomies: gapic_v1.method.wrap_method(
- self.import_taxonomies, default_timeout=None, client_info=_client_info,
+ self.import_taxonomies, default_timeout=None, client_info=client_info,
),
self.export_taxonomies: gapic_v1.method.wrap_method(
- self.export_taxonomies, default_timeout=None, client_info=_client_info,
+ self.export_taxonomies, default_timeout=None, client_info=client_info,
),
}
diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py
index d2d74539..84f435e9 100644
--- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py
+++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc.py
@@ -15,19 +15,20 @@
# limitations under the License.
#
+import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
+from google.api_core import gapic_v1 # type: ignore
from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
-
import grpc # type: ignore
from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization
-from .base import PolicyTagManagerSerializationTransport
+from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO
class PolicyTagManagerSerializationGrpcTransport(
@@ -59,7 +60,9 @@ def __init__(
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
- quota_project_id: Optional[str] = None
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -78,16 +81,23 @@ def __init__(
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -95,6 +105,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -102,7 +114,13 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -133,6 +151,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
self._stubs = {} # type: Dict[str, Callable]
@@ -143,6 +179,7 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
@classmethod
@@ -153,7 +190,7 @@ def create_channel(
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
- **kwargs
+ **kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
@@ -187,24 +224,13 @@ def create_channel(
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
- **kwargs
+ **kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
- # Return the channel from cache.
return self._grpc_channel
@property
diff --git a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py
index 8e47b76f..a93a8572 100644
--- a/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py
+++ b/google/cloud/datacatalog_v1beta1/services/policy_tag_manager_serialization/transports/grpc_asyncio.py
@@ -15,9 +15,12 @@
# limitations under the License.
#
+import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
+from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
@@ -26,7 +29,7 @@
from google.cloud.datacatalog_v1beta1.types import policytagmanagerserialization
-from .base import PolicyTagManagerSerializationTransport
+from .base import PolicyTagManagerSerializationTransport, DEFAULT_CLIENT_INFO
from .grpc import PolicyTagManagerSerializationGrpcTransport
@@ -101,7 +104,9 @@ def __init__(
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -121,16 +126,23 @@ def __init__(
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or applicatin default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -138,6 +150,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -145,13 +159,24 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
@@ -171,6 +196,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
# Run the base constructor.
super().__init__(
@@ -179,6 +222,7 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
self._stubs = {}
@@ -190,13 +234,6 @@ def grpc_channel(self) -> aio.Channel:
This property caches on the instance; repeated calls return
the same channel.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
# Return the channel from cache.
return self._grpc_channel
diff --git a/google/cloud/datacatalog_v1beta1/types/datacatalog.py b/google/cloud/datacatalog_v1beta1/types/datacatalog.py
index 7bbbae2f..ee843cac 100644
--- a/google/cloud/datacatalog_v1beta1/types/datacatalog.py
+++ b/google/cloud/datacatalog_v1beta1/types/datacatalog.py
@@ -490,7 +490,7 @@ class Entry(proto.Message):
Output only when Entry is of type in the EntryType enum. For
entries with user_specified_type, this field is optional and
defaults to an empty string.
- type (~.datacatalog.EntryType):
+ type_ (~.datacatalog.EntryType):
The type of the entry.
Only used for Entries with types in the
EntryType enum.
@@ -555,7 +555,7 @@ class Entry(proto.Message):
linked_resource = proto.Field(proto.STRING, number=9)
- type = proto.Field(proto.ENUM, number=2, oneof="entry_type", enum="EntryType",)
+ type_ = proto.Field(proto.ENUM, number=2, oneof="entry_type", enum="EntryType",)
user_specified_type = proto.Field(proto.STRING, number=16, oneof="entry_type")
@@ -988,7 +988,7 @@ class ListEntriesResponse(proto.Message):
def raw_page(self):
return self
- entries = proto.RepeatedField(proto.MESSAGE, number=1, message=Entry,)
+ entries = proto.RepeatedField(proto.MESSAGE, number=1, message="Entry",)
next_page_token = proto.Field(proto.STRING, number=2)
diff --git a/google/cloud/datacatalog_v1beta1/types/policytagmanager.py b/google/cloud/datacatalog_v1beta1/types/policytagmanager.py
index 259be1b3..ad1694c3 100644
--- a/google/cloud/datacatalog_v1beta1/types/policytagmanager.py
+++ b/google/cloud/datacatalog_v1beta1/types/policytagmanager.py
@@ -152,7 +152,7 @@ class CreateTaxonomyRequest(proto.Message):
parent = proto.Field(proto.STRING, number=1)
- taxonomy = proto.Field(proto.MESSAGE, number=2, message=Taxonomy,)
+ taxonomy = proto.Field(proto.MESSAGE, number=2, message="Taxonomy",)
class DeleteTaxonomyRequest(proto.Message):
@@ -185,7 +185,7 @@ class UpdateTaxonomyRequest(proto.Message):
to update.
"""
- taxonomy = proto.Field(proto.MESSAGE, number=1, message=Taxonomy,)
+ taxonomy = proto.Field(proto.MESSAGE, number=1, message="Taxonomy",)
update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,)
@@ -231,7 +231,7 @@ class ListTaxonomiesResponse(proto.Message):
def raw_page(self):
return self
- taxonomies = proto.RepeatedField(proto.MESSAGE, number=1, message=Taxonomy,)
+ taxonomies = proto.RepeatedField(proto.MESSAGE, number=1, message="Taxonomy",)
next_page_token = proto.Field(proto.STRING, number=2)
@@ -263,7 +263,7 @@ class CreatePolicyTagRequest(proto.Message):
parent = proto.Field(proto.STRING, number=1)
- policy_tag = proto.Field(proto.MESSAGE, number=2, message=PolicyTag,)
+ policy_tag = proto.Field(proto.MESSAGE, number=2, message="PolicyTag",)
class DeletePolicyTagRequest(proto.Message):
@@ -300,7 +300,7 @@ class UpdatePolicyTagRequest(proto.Message):
to update.
"""
- policy_tag = proto.Field(proto.MESSAGE, number=1, message=PolicyTag,)
+ policy_tag = proto.Field(proto.MESSAGE, number=1, message="PolicyTag",)
update_mask = proto.Field(proto.MESSAGE, number=2, message=field_mask.FieldMask,)
@@ -347,7 +347,7 @@ class ListPolicyTagsResponse(proto.Message):
def raw_page(self):
return self
- policy_tags = proto.RepeatedField(proto.MESSAGE, number=1, message=PolicyTag,)
+ policy_tags = proto.RepeatedField(proto.MESSAGE, number=1, message="PolicyTag",)
next_page_token = proto.Field(proto.STRING, number=2)
diff --git a/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py b/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py
index dd14cd86..2f76dbc7 100644
--- a/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py
+++ b/google/cloud/datacatalog_v1beta1/types/policytagmanagerserialization.py
@@ -116,7 +116,7 @@ class InlineSource(proto.Message):
"""
taxonomies = proto.RepeatedField(
- proto.MESSAGE, number=1, message=SerializedTaxonomy,
+ proto.MESSAGE, number=1, message="SerializedTaxonomy",
)
@@ -167,7 +167,7 @@ class ExportTaxonomiesResponse(proto.Message):
"""
taxonomies = proto.RepeatedField(
- proto.MESSAGE, number=1, message=SerializedTaxonomy,
+ proto.MESSAGE, number=1, message="SerializedTaxonomy",
)
diff --git a/google/cloud/datacatalog_v1beta1/types/schema.py b/google/cloud/datacatalog_v1beta1/types/schema.py
index 55014c32..ebc56879 100644
--- a/google/cloud/datacatalog_v1beta1/types/schema.py
+++ b/google/cloud/datacatalog_v1beta1/types/schema.py
@@ -42,7 +42,7 @@ class ColumnSchema(proto.Message):
Attributes:
column (str):
Required. Name of the column.
- type (str):
+ type_ (str):
Required. Type of the column.
description (str):
Optional. Description of the column. Default
@@ -59,7 +59,7 @@ class ColumnSchema(proto.Message):
column = proto.Field(proto.STRING, number=6)
- type = proto.Field(proto.STRING, number=1)
+ type_ = proto.Field(proto.STRING, number=1)
description = proto.Field(proto.STRING, number=2)
diff --git a/google/cloud/datacatalog_v1beta1/types/tags.py b/google/cloud/datacatalog_v1beta1/types/tags.py
index ddd5cf1f..098fd3c2 100644
--- a/google/cloud/datacatalog_v1beta1/types/tags.py
+++ b/google/cloud/datacatalog_v1beta1/types/tags.py
@@ -202,7 +202,7 @@ class TagTemplateField(proto.Message):
display_name (str):
The display name for this field. Defaults to
an empty string.
- type (~.tags.FieldType):
+ type_ (~.tags.FieldType):
Required. The type of value this tag field
can contain.
is_required (bool):
@@ -221,7 +221,7 @@ class TagTemplateField(proto.Message):
display_name = proto.Field(proto.STRING, number=1)
- type = proto.Field(proto.MESSAGE, number=2, message="FieldType",)
+ type_ = proto.Field(proto.MESSAGE, number=2, message="FieldType",)
is_required = proto.Field(proto.BOOL, number=3)
diff --git a/noxfile.py b/noxfile.py
index f2524069..5ba11445 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -28,7 +28,7 @@
DEFAULT_PYTHON_VERSION = "3.8"
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
-UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -72,7 +72,9 @@ def default(session):
# Install all test dependencies, then install this package in-place.
session.install("asyncmock", "pytest-asyncio")
- session.install("mock", "pytest", "pytest-cov")
+ session.install(
+ "mock", "pytest", "pytest-cov",
+ )
session.install("-e", ".")
# Run py.test against the unit tests.
@@ -141,7 +143,7 @@ def cover(session):
test runs (not system test runs), and then erases coverage data.
"""
session.install("coverage", "pytest-cov")
- session.run("coverage", "report", "--show-missing", "--fail-under=99")
+ session.run("coverage", "report", "--show-missing", "--fail-under=100")
session.run("coverage", "erase")
@@ -173,7 +175,9 @@ def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".")
- session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+ # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
+ # https://github.com/docascode/sphinx-docfx-yaml/issues/97
+ session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
diff --git a/samples/snippets/README.rst b/samples/snippets/README.rst
index 343431d9..3476ccea 100644
--- a/samples/snippets/README.rst
+++ b/samples/snippets/README.rst
@@ -1,3 +1,4 @@
+
.. This file is automatically generated. Do not edit this file directly.
Google Cloud Data Catalog Python Samples
@@ -15,13 +16,11 @@ This directory contains samples for Google Cloud Data Catalog. `Google Cloud Dat
.. _Google Cloud Data Catalog: https://cloud.google.com/data-catalog/docs
-
-
-
Setup
-------------------------------------------------------------------------------
+
Authentication
++++++++++++++
@@ -32,6 +31,9 @@ credentials for applications.
.. _Authentication Getting Started Guide:
https://cloud.google.com/docs/authentication/getting-started
+
+
+
Install Dependencies
++++++++++++++++++++
@@ -46,7 +48,7 @@ Install Dependencies
.. _Python Development Environment Setup Guide:
https://cloud.google.com/python/setup
-#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+.
+#. Create a virtualenv. Samples are compatible with Python 3.6+.
.. code-block:: bash
@@ -62,9 +64,15 @@ Install Dependencies
.. _pip: https://pip.pypa.io/
.. _virtualenv: https://virtualenv.pypa.io/
+
+
+
+
+
Samples
-------------------------------------------------------------------------------
+
Lookup entry
+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
@@ -80,6 +88,7 @@ To run this sample:
$ python lookup_entry.py
+
usage: lookup_entry.py [-h]
project_id
{bigquery-dataset,bigquery-table,pubsub-topic} ...
@@ -107,6 +116,10 @@ To run this sample:
+
+
+
+
The client library
-------------------------------------------------------------------------------
@@ -122,4 +135,5 @@ to `browse the source`_ and `report issues`_.
https://github.com/GoogleCloudPlatform/google-cloud-python/issues
-.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
+
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index ba55d7ce..b90eef00 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -39,6 +39,10 @@
# You can opt out from the test for specific Python versions.
'ignored_versions': ["2.7"],
+ # Old samples are opted out of enforcing Python type hints
+ # All new samples should feature them
+ 'enforce_type_hints': False,
+
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
@@ -132,7 +136,10 @@ def _determine_local_import_names(start_dir):
@nox.session
def lint(session):
- session.install("flake8", "flake8-import-order")
+ if not TEST_CONFIG['enforce_type_hints']:
+ session.install("flake8", "flake8-import-order")
+ else:
+ session.install("flake8", "flake8-import-order", "flake8-annotations")
local_names = _determine_local_import_names(".")
args = FLAKE8_COMMON_ARGS + [
@@ -141,8 +148,18 @@ def lint(session):
"."
]
session.run("flake8", *args)
+#
+# Black
+#
+@nox.session
+def blacken(session):
+ session.install("black")
+ python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+
+ session.run("black", *python_files)
+
#
# Sample Tests
#
@@ -201,6 +218,11 @@ def _get_repo_root():
break
if Path(p / ".git").exists():
return str(p)
+ # .git is not available in repos cloned via Cloud Build
+ # setup.py is always in the library's root, so use that instead
+ # https://github.com/googleapis/synthtool/issues/792
+ if Path(p / "setup.py").exists():
+ return str(p)
p = p.parent
raise Exception("Unable to detect repository root.")
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
index ff599eb2..21f6d2a2 100755
--- a/scripts/decrypt-secrets.sh
+++ b/scripts/decrypt-secrets.sh
@@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" )
# Work from the project root.
cd $ROOT
+# Prevent it from overriding files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+ [[ -f "testing/service-account.json" ]] || \
+ [[ -f "testing/client-secrets.json" ]]; then
+ echo "One or more target files exist, aborting."
+ exit 1
+fi
+
# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ --project="${PROJECT_ID}" \
> testing/test-env.sh
gcloud secrets versions access latest \
--secret="python-docs-samples-service-account" \
+ --project="${PROJECT_ID}" \
> testing/service-account.json
gcloud secrets versions access latest \
--secret="python-docs-samples-client-secrets" \
- > testing/client-secrets.json
\ No newline at end of file
+ --project="${PROJECT_ID}" \
+ > testing/client-secrets.json
diff --git a/scripts/fixup_datacatalog_v1_keywords.py b/scripts/fixup_datacatalog_v1_keywords.py
index 9ad22462..04befa38 100644
--- a/scripts/fixup_datacatalog_v1_keywords.py
+++ b/scripts/fixup_datacatalog_v1_keywords.py
@@ -1,3 +1,4 @@
+#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
diff --git a/scripts/fixup_datacatalog_v1beta1_keywords.py b/scripts/fixup_datacatalog_v1beta1_keywords.py
index e48632cc..bf43018a 100644
--- a/scripts/fixup_datacatalog_v1beta1_keywords.py
+++ b/scripts/fixup_datacatalog_v1beta1_keywords.py
@@ -1,3 +1,4 @@
+#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
diff --git a/synth.metadata b/synth.metadata
index 48e1a9a9..0f3327ea 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -3,22 +3,30 @@
{
"git": {
"name": ".",
- "remote": "git@github.com:googleapis/python-datacatalog.git",
- "sha": "09d02ebb2738c9663abe060da926c2432d6ffb42"
+ "remote": "git@github.com:googleapis/python-datacatalog",
+ "sha": "7f1b8ee4579c4306d9b6a56498a0755803b9eadf"
+ }
+ },
+ {
+ "git": {
+ "name": "googleapis",
+ "remote": "https://github.com/googleapis/googleapis.git",
+ "sha": "754a312a0d01cfc1484d397872ff45e5565af0da",
+ "internalRef": "342758098"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "d3049e66447b44dc10579e461d5e08e0e3838edd"
+ "sha": "d5fc0bcf9ea9789c5b0e3154a9e3b29e5cea6116"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "d3049e66447b44dc10579e461d5e08e0e3838edd"
+ "sha": "d5fc0bcf9ea9789c5b0e3154a9e3b29e5cea6116"
}
}
],
diff --git a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py
index 79523fa6..7851ae04 100644
--- a/tests/unit/gapic/datacatalog_v1/test_data_catalog.py
+++ b/tests/unit/gapic/datacatalog_v1/test_data_catalog.py
@@ -39,12 +39,9 @@
from google.cloud.datacatalog_v1.types import common
from google.cloud.datacatalog_v1.types import datacatalog
from google.cloud.datacatalog_v1.types import gcs_fileset_spec
-from google.cloud.datacatalog_v1.types import gcs_fileset_spec as gcd_gcs_fileset_spec
from google.cloud.datacatalog_v1.types import schema
-from google.cloud.datacatalog_v1.types import schema as gcd_schema
from google.cloud.datacatalog_v1.types import search
from google.cloud.datacatalog_v1.types import table_spec
-from google.cloud.datacatalog_v1.types import table_spec as gcd_table_spec
from google.cloud.datacatalog_v1.types import tags
from google.cloud.datacatalog_v1.types import timestamps
from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
@@ -105,12 +102,12 @@ def test_data_catalog_client_from_service_account_file(client_class):
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
client = client_class.from_service_account_json("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
- assert client._transport._host == "datacatalog.googleapis.com:443"
+ assert client.transport._host == "datacatalog.googleapis.com:443"
def test_data_catalog_client_get_transport_class():
@@ -164,14 +161,14 @@ def test_data_catalog_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -180,14 +177,14 @@ def test_data_catalog_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -196,90 +193,173 @@ def test_data_catalog_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "true"),
+ (
+ DataCatalogAsyncClient,
+ transports.DataCatalogGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "false"),
+ (
+ DataCatalogAsyncClient,
+ transports.DataCatalogGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)
+)
+@mock.patch.object(
+ DataCatalogAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(DataCatalogAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_data_catalog_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=client_cert_source_callback,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and default_client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
- with mock.patch.object(transport_class, "__init__") as patched:
+ ssl_channel_creds = mock.Mock()
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=True,
+ "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
):
patched.return_value = None
- client = client_class()
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_ssl_channel_creds = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_ssl_channel_creds = ssl_channel_creds
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
+ host=expected_host,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", but client_cert_source and default_client_cert_source are None.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=False,
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
):
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has
- # unsupported value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError):
- client = client_class()
-
- # Check the case quota_project_id is provided
- options = client_options.ClientOptions(quota_project_id="octopus")
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id="octopus",
- )
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.ssl_credentials",
+ new_callable=mock.PropertyMock,
+ ) as ssl_credentials_mock:
+ if use_client_cert_env == "false":
+ is_mtls_mock.return_value = False
+ ssl_credentials_mock.return_value = None
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_ssl_channel_creds = None
+ else:
+ is_mtls_mock.return_value = True
+ ssl_credentials_mock.return_value = mock.Mock()
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_ssl_channel_creds = (
+ ssl_credentials_mock.return_value
+ )
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ ):
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ is_mtls_mock.return_value = False
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
@pytest.mark.parametrize(
@@ -306,9 +386,9 @@ def test_data_catalog_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -336,9 +416,9 @@ def test_data_catalog_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -353,9 +433,9 @@ def test_data_catalog_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -371,7 +451,7 @@ def test_search_catalog(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.search_catalog), "__call__") as call:
+ with mock.patch.object(type(client.transport.search_catalog), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.SearchCatalogResponse(
next_page_token="next_page_token_value", unreachable=["unreachable_value"],
@@ -386,6 +466,7 @@ def test_search_catalog(
assert args[0] == datacatalog.SearchCatalogRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.SearchCatalogPager)
assert response.next_page_token == "next_page_token_value"
@@ -398,19 +479,19 @@ def test_search_catalog_from_dict():
@pytest.mark.asyncio
-async def test_search_catalog_async(transport: str = "grpc_asyncio"):
+async def test_search_catalog_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.SearchCatalogRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.SearchCatalogRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.search_catalog), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.search_catalog), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.SearchCatalogResponse(
@@ -425,7 +506,7 @@ async def test_search_catalog_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.SearchCatalogRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.SearchCatalogAsyncPager)
@@ -435,11 +516,16 @@ async def test_search_catalog_async(transport: str = "grpc_asyncio"):
assert response.unreachable == ["unreachable_value"]
+@pytest.mark.asyncio
+async def test_search_catalog_async_from_dict():
+ await test_search_catalog_async(request_type=dict)
+
+
def test_search_catalog_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.search_catalog), "__call__") as call:
+ with mock.patch.object(type(client.transport.search_catalog), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.SearchCatalogResponse()
@@ -484,9 +570,7 @@ async def test_search_catalog_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.search_catalog), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.search_catalog), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.SearchCatalogResponse()
@@ -534,7 +618,7 @@ def test_search_catalog_pager():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.search_catalog), "__call__") as call:
+ with mock.patch.object(type(client.transport.search_catalog), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
datacatalog.SearchCatalogResponse(
@@ -569,7 +653,7 @@ def test_search_catalog_pages():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.search_catalog), "__call__") as call:
+ with mock.patch.object(type(client.transport.search_catalog), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
datacatalog.SearchCatalogResponse(
@@ -590,8 +674,8 @@ def test_search_catalog_pages():
RuntimeError,
)
pages = list(client.search_catalog(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -600,9 +684,7 @@ async def test_search_catalog_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.search_catalog),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.search_catalog), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -639,9 +721,7 @@ async def test_search_catalog_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.search_catalog),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.search_catalog), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -663,10 +743,10 @@ async def test_search_catalog_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.search_catalog(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.search_catalog(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
def test_create_entry_group(
@@ -682,7 +762,7 @@ def test_create_entry_group(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_entry_group), "__call__"
+ type(client.transport.create_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup(
@@ -700,6 +780,7 @@ def test_create_entry_group(
assert args[0] == datacatalog.CreateEntryGroupRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.EntryGroup)
assert response.name == "name_value"
@@ -714,18 +795,20 @@ def test_create_entry_group_from_dict():
@pytest.mark.asyncio
-async def test_create_entry_group_async(transport: str = "grpc_asyncio"):
+async def test_create_entry_group_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryGroupRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.CreateEntryGroupRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_entry_group), "__call__"
+ type(client.transport.create_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -742,7 +825,7 @@ async def test_create_entry_group_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.CreateEntryGroupRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.EntryGroup)
@@ -754,6 +837,11 @@ async def test_create_entry_group_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_create_entry_group_async_from_dict():
+ await test_create_entry_group_async(request_type=dict)
+
+
def test_create_entry_group_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -764,7 +852,7 @@ def test_create_entry_group_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_entry_group), "__call__"
+ type(client.transport.create_entry_group), "__call__"
) as call:
call.return_value = datacatalog.EntryGroup()
@@ -791,7 +879,7 @@ async def test_create_entry_group_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_entry_group), "__call__"
+ type(client.transport.create_entry_group), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.EntryGroup()
@@ -814,7 +902,7 @@ def test_create_entry_group_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_entry_group), "__call__"
+ type(client.transport.create_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup()
@@ -859,7 +947,7 @@ async def test_create_entry_group_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_entry_group), "__call__"
+ type(client.transport.create_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup()
@@ -914,7 +1002,7 @@ def test_get_entry_group(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_entry_group), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup(
name="name_value",
@@ -931,6 +1019,7 @@ def test_get_entry_group(
assert args[0] == datacatalog.GetEntryGroupRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.EntryGroup)
assert response.name == "name_value"
@@ -945,19 +1034,19 @@ def test_get_entry_group_from_dict():
@pytest.mark.asyncio
-async def test_get_entry_group_async(transport: str = "grpc_asyncio"):
+async def test_get_entry_group_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryGroupRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.GetEntryGroupRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_entry_group), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.EntryGroup(
@@ -973,7 +1062,7 @@ async def test_get_entry_group_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.GetEntryGroupRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.EntryGroup)
@@ -985,6 +1074,11 @@ async def test_get_entry_group_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_get_entry_group_async_from_dict():
+ await test_get_entry_group_async(request_type=dict)
+
+
def test_get_entry_group_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -994,7 +1088,7 @@ def test_get_entry_group_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_entry_group), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
call.return_value = datacatalog.EntryGroup()
client.get_entry_group(request)
@@ -1019,9 +1113,7 @@ async def test_get_entry_group_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_entry_group), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.EntryGroup()
)
@@ -1042,7 +1134,7 @@ def test_get_entry_group_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_entry_group), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup()
@@ -1080,9 +1172,7 @@ async def test_get_entry_group_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_entry_group), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup()
@@ -1132,7 +1222,7 @@ def test_update_entry_group(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_entry_group), "__call__"
+ type(client.transport.update_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup(
@@ -1150,6 +1240,7 @@ def test_update_entry_group(
assert args[0] == datacatalog.UpdateEntryGroupRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.EntryGroup)
assert response.name == "name_value"
@@ -1164,18 +1255,20 @@ def test_update_entry_group_from_dict():
@pytest.mark.asyncio
-async def test_update_entry_group_async(transport: str = "grpc_asyncio"):
+async def test_update_entry_group_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryGroupRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.UpdateEntryGroupRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_entry_group), "__call__"
+ type(client.transport.update_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1192,7 +1285,7 @@ async def test_update_entry_group_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.UpdateEntryGroupRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.EntryGroup)
@@ -1204,6 +1297,11 @@ async def test_update_entry_group_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_update_entry_group_async_from_dict():
+ await test_update_entry_group_async(request_type=dict)
+
+
def test_update_entry_group_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -1214,7 +1312,7 @@ def test_update_entry_group_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_entry_group), "__call__"
+ type(client.transport.update_entry_group), "__call__"
) as call:
call.return_value = datacatalog.EntryGroup()
@@ -1243,7 +1341,7 @@ async def test_update_entry_group_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_entry_group), "__call__"
+ type(client.transport.update_entry_group), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.EntryGroup()
@@ -1268,7 +1366,7 @@ def test_update_entry_group_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_entry_group), "__call__"
+ type(client.transport.update_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup()
@@ -1309,7 +1407,7 @@ async def test_update_entry_group_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_entry_group), "__call__"
+ type(client.transport.update_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup()
@@ -1361,7 +1459,7 @@ def test_delete_entry_group(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_entry_group), "__call__"
+ type(client.transport.delete_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1383,18 +1481,20 @@ def test_delete_entry_group_from_dict():
@pytest.mark.asyncio
-async def test_delete_entry_group_async(transport: str = "grpc_asyncio"):
+async def test_delete_entry_group_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryGroupRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.DeleteEntryGroupRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_entry_group), "__call__"
+ type(client.transport.delete_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -1405,12 +1505,17 @@ async def test_delete_entry_group_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.DeleteEntryGroupRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_entry_group_async_from_dict():
+ await test_delete_entry_group_async(request_type=dict)
+
+
def test_delete_entry_group_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -1421,7 +1526,7 @@ def test_delete_entry_group_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_entry_group), "__call__"
+ type(client.transport.delete_entry_group), "__call__"
) as call:
call.return_value = None
@@ -1448,7 +1553,7 @@ async def test_delete_entry_group_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_entry_group), "__call__"
+ type(client.transport.delete_entry_group), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -1469,7 +1574,7 @@ def test_delete_entry_group_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_entry_group), "__call__"
+ type(client.transport.delete_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1503,7 +1608,7 @@ async def test_delete_entry_group_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_entry_group), "__call__"
+ type(client.transport.delete_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1546,7 +1651,7 @@ def test_list_entry_groups(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListEntryGroupsResponse(
@@ -1562,6 +1667,7 @@ def test_list_entry_groups(
assert args[0] == datacatalog.ListEntryGroupsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListEntryGroupsPager)
assert response.next_page_token == "next_page_token_value"
@@ -1572,18 +1678,20 @@ def test_list_entry_groups_from_dict():
@pytest.mark.asyncio
-async def test_list_entry_groups_async(transport: str = "grpc_asyncio"):
+async def test_list_entry_groups_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.ListEntryGroupsRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.ListEntryGroupsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1598,7 +1706,7 @@ async def test_list_entry_groups_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.ListEntryGroupsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListEntryGroupsAsyncPager)
@@ -1606,6 +1714,11 @@ async def test_list_entry_groups_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_entry_groups_async_from_dict():
+ await test_list_entry_groups_async(request_type=dict)
+
+
def test_list_entry_groups_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -1616,7 +1729,7 @@ def test_list_entry_groups_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
call.return_value = datacatalog.ListEntryGroupsResponse()
@@ -1643,7 +1756,7 @@ async def test_list_entry_groups_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.ListEntryGroupsResponse()
@@ -1666,7 +1779,7 @@ def test_list_entry_groups_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListEntryGroupsResponse()
@@ -1700,7 +1813,7 @@ async def test_list_entry_groups_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListEntryGroupsResponse()
@@ -1737,7 +1850,7 @@ def test_list_entry_groups_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1779,7 +1892,7 @@ def test_list_entry_groups_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1803,8 +1916,8 @@ def test_list_entry_groups_pages():
RuntimeError,
)
pages = list(client.list_entry_groups(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -1813,7 +1926,7 @@ async def test_list_entry_groups_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entry_groups),
+ type(client.transport.list_entry_groups),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -1854,7 +1967,7 @@ async def test_list_entry_groups_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entry_groups),
+ type(client.transport.list_entry_groups),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -1880,10 +1993,10 @@ async def test_list_entry_groups_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.list_entry_groups(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.list_entry_groups(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
def test_create_entry(
@@ -1898,14 +2011,14 @@ def test_create_entry(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry(
name="name_value",
linked_resource="linked_resource_value",
display_name="display_name_value",
description="description_value",
- type=datacatalog.EntryType.TABLE,
+ type_=datacatalog.EntryType.TABLE,
integrated_system=common.IntegratedSystem.BIGQUERY,
gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec(
file_patterns=["file_patterns_value"]
@@ -1921,6 +2034,7 @@ def test_create_entry(
assert args[0] == datacatalog.CreateEntryRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.Entry)
assert response.name == "name_value"
@@ -1937,19 +2051,19 @@ def test_create_entry_from_dict():
@pytest.mark.asyncio
-async def test_create_entry_async(transport: str = "grpc_asyncio"):
+async def test_create_entry_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.CreateEntryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.Entry(
@@ -1966,7 +2080,7 @@ async def test_create_entry_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.CreateEntryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.Entry)
@@ -1980,6 +2094,11 @@ async def test_create_entry_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_create_entry_async_from_dict():
+ await test_create_entry_async(request_type=dict)
+
+
def test_create_entry_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -1989,7 +2108,7 @@ def test_create_entry_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
call.return_value = datacatalog.Entry()
client.create_entry(request)
@@ -2014,9 +2133,7 @@ async def test_create_entry_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry())
await client.create_entry(request)
@@ -2035,7 +2152,7 @@ def test_create_entry_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry()
@@ -2078,9 +2195,7 @@ async def test_create_entry_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry()
@@ -2132,14 +2247,14 @@ def test_update_entry(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry(
name="name_value",
linked_resource="linked_resource_value",
display_name="display_name_value",
description="description_value",
- type=datacatalog.EntryType.TABLE,
+ type_=datacatalog.EntryType.TABLE,
integrated_system=common.IntegratedSystem.BIGQUERY,
gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec(
file_patterns=["file_patterns_value"]
@@ -2155,6 +2270,7 @@ def test_update_entry(
assert args[0] == datacatalog.UpdateEntryRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.Entry)
assert response.name == "name_value"
@@ -2171,19 +2287,19 @@ def test_update_entry_from_dict():
@pytest.mark.asyncio
-async def test_update_entry_async(transport: str = "grpc_asyncio"):
+async def test_update_entry_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.UpdateEntryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.Entry(
@@ -2200,7 +2316,7 @@ async def test_update_entry_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.UpdateEntryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.Entry)
@@ -2214,6 +2330,11 @@ async def test_update_entry_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_update_entry_async_from_dict():
+ await test_update_entry_async(request_type=dict)
+
+
def test_update_entry_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -2223,7 +2344,7 @@ def test_update_entry_field_headers():
request.entry.name = "entry.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
call.return_value = datacatalog.Entry()
client.update_entry(request)
@@ -2248,9 +2369,7 @@ async def test_update_entry_field_headers_async():
request.entry.name = "entry.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry())
await client.update_entry(request)
@@ -2269,7 +2388,7 @@ def test_update_entry_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry()
@@ -2308,9 +2427,7 @@ async def test_update_entry_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry()
@@ -2358,7 +2475,7 @@ def test_delete_entry(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -2379,19 +2496,19 @@ def test_delete_entry_from_dict():
@pytest.mark.asyncio
-async def test_delete_entry_async(transport: str = "grpc_asyncio"):
+async def test_delete_entry_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.DeleteEntryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -2401,12 +2518,17 @@ async def test_delete_entry_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.DeleteEntryRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_entry_async_from_dict():
+ await test_delete_entry_async(request_type=dict)
+
+
def test_delete_entry_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -2416,7 +2538,7 @@ def test_delete_entry_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
call.return_value = None
client.delete_entry(request)
@@ -2441,9 +2563,7 @@ async def test_delete_entry_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_entry(request)
@@ -2462,7 +2582,7 @@ def test_delete_entry_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -2494,9 +2614,7 @@ async def test_delete_entry_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -2535,14 +2653,14 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry(
name="name_value",
linked_resource="linked_resource_value",
display_name="display_name_value",
description="description_value",
- type=datacatalog.EntryType.TABLE,
+ type_=datacatalog.EntryType.TABLE,
integrated_system=common.IntegratedSystem.BIGQUERY,
gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec(
file_patterns=["file_patterns_value"]
@@ -2558,6 +2676,7 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq
assert args[0] == datacatalog.GetEntryRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.Entry)
assert response.name == "name_value"
@@ -2574,19 +2693,19 @@ def test_get_entry_from_dict():
@pytest.mark.asyncio
-async def test_get_entry_async(transport: str = "grpc_asyncio"):
+async def test_get_entry_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.GetEntryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.Entry(
@@ -2603,7 +2722,7 @@ async def test_get_entry_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.GetEntryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.Entry)
@@ -2617,6 +2736,11 @@ async def test_get_entry_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_get_entry_async_from_dict():
+ await test_get_entry_async(request_type=dict)
+
+
def test_get_entry_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -2626,7 +2750,7 @@ def test_get_entry_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
call.return_value = datacatalog.Entry()
client.get_entry(request)
@@ -2651,9 +2775,7 @@ async def test_get_entry_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry())
await client.get_entry(request)
@@ -2672,7 +2794,7 @@ def test_get_entry_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry()
@@ -2704,9 +2826,7 @@ async def test_get_entry_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry()
@@ -2747,14 +2867,14 @@ def test_lookup_entry(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.lookup_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry(
name="name_value",
linked_resource="linked_resource_value",
display_name="display_name_value",
description="description_value",
- type=datacatalog.EntryType.TABLE,
+ type_=datacatalog.EntryType.TABLE,
integrated_system=common.IntegratedSystem.BIGQUERY,
gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec(
file_patterns=["file_patterns_value"]
@@ -2770,6 +2890,7 @@ def test_lookup_entry(
assert args[0] == datacatalog.LookupEntryRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.Entry)
assert response.name == "name_value"
@@ -2786,19 +2907,19 @@ def test_lookup_entry_from_dict():
@pytest.mark.asyncio
-async def test_lookup_entry_async(transport: str = "grpc_asyncio"):
+async def test_lookup_entry_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.LookupEntryRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.LookupEntryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.lookup_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.Entry(
@@ -2815,7 +2936,7 @@ async def test_lookup_entry_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.LookupEntryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.Entry)
@@ -2829,6 +2950,11 @@ async def test_lookup_entry_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_lookup_entry_async_from_dict():
+ await test_lookup_entry_async(request_type=dict)
+
+
def test_list_entries(
transport: str = "grpc", request_type=datacatalog.ListEntriesRequest
):
@@ -2841,7 +2967,7 @@ def test_list_entries(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_entries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListEntriesResponse(
next_page_token="next_page_token_value",
@@ -2856,6 +2982,7 @@ def test_list_entries(
assert args[0] == datacatalog.ListEntriesRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListEntriesPager)
assert response.next_page_token == "next_page_token_value"
@@ -2866,19 +2993,19 @@ def test_list_entries_from_dict():
@pytest.mark.asyncio
-async def test_list_entries_async(transport: str = "grpc_asyncio"):
+async def test_list_entries_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.ListEntriesRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.ListEntriesRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_entries), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.ListEntriesResponse(next_page_token="next_page_token_value",)
@@ -2890,7 +3017,7 @@ async def test_list_entries_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.ListEntriesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListEntriesAsyncPager)
@@ -2898,6 +3025,11 @@ async def test_list_entries_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_entries_async_from_dict():
+ await test_list_entries_async(request_type=dict)
+
+
def test_list_entries_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -2907,7 +3039,7 @@ def test_list_entries_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_entries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
call.return_value = datacatalog.ListEntriesResponse()
client.list_entries(request)
@@ -2932,9 +3064,7 @@ async def test_list_entries_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_entries), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.ListEntriesResponse()
)
@@ -2955,7 +3085,7 @@ def test_list_entries_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_entries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListEntriesResponse()
@@ -2987,9 +3117,7 @@ async def test_list_entries_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_entries), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListEntriesResponse()
@@ -3024,7 +3152,7 @@ def test_list_entries_pager():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_entries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
datacatalog.ListEntriesResponse(
@@ -3062,7 +3190,7 @@ def test_list_entries_pages():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_entries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
datacatalog.ListEntriesResponse(
@@ -3083,8 +3211,8 @@ def test_list_entries_pages():
RuntimeError,
)
pages = list(client.list_entries(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -3093,9 +3221,7 @@ async def test_list_entries_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entries),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_entries), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -3132,9 +3258,7 @@ async def test_list_entries_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entries),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_entries), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -3156,10 +3280,10 @@ async def test_list_entries_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.list_entries(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.list_entries(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
def test_create_tag_template(
@@ -3175,7 +3299,7 @@ def test_create_tag_template(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_tag_template), "__call__"
+ type(client.transport.create_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate(
@@ -3191,6 +3315,7 @@ def test_create_tag_template(
assert args[0] == datacatalog.CreateTagTemplateRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.TagTemplate)
assert response.name == "name_value"
@@ -3203,18 +3328,20 @@ def test_create_tag_template_from_dict():
@pytest.mark.asyncio
-async def test_create_tag_template_async(transport: str = "grpc_asyncio"):
+async def test_create_tag_template_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagTemplateRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.CreateTagTemplateRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_tag_template), "__call__"
+ type(client.transport.create_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -3227,7 +3354,7 @@ async def test_create_tag_template_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.CreateTagTemplateRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.TagTemplate)
@@ -3237,6 +3364,11 @@ async def test_create_tag_template_async(transport: str = "grpc_asyncio"):
assert response.display_name == "display_name_value"
+@pytest.mark.asyncio
+async def test_create_tag_template_async_from_dict():
+ await test_create_tag_template_async(request_type=dict)
+
+
def test_create_tag_template_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -3247,7 +3379,7 @@ def test_create_tag_template_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_tag_template), "__call__"
+ type(client.transport.create_tag_template), "__call__"
) as call:
call.return_value = tags.TagTemplate()
@@ -3274,7 +3406,7 @@ async def test_create_tag_template_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_tag_template), "__call__"
+ type(client.transport.create_tag_template), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate())
@@ -3295,7 +3427,7 @@ def test_create_tag_template_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_tag_template), "__call__"
+ type(client.transport.create_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate()
@@ -3340,7 +3472,7 @@ async def test_create_tag_template_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_tag_template), "__call__"
+ type(client.transport.create_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate()
@@ -3393,9 +3525,7 @@ def test_get_tag_template(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_tag_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate(
name="name_value", display_name="display_name_value",
@@ -3410,6 +3540,7 @@ def test_get_tag_template(
assert args[0] == datacatalog.GetTagTemplateRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.TagTemplate)
assert response.name == "name_value"
@@ -3422,19 +3553,19 @@ def test_get_tag_template_from_dict():
@pytest.mark.asyncio
-async def test_get_tag_template_async(transport: str = "grpc_asyncio"):
+async def test_get_tag_template_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.GetTagTemplateRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.GetTagTemplateRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_tag_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
tags.TagTemplate(name="name_value", display_name="display_name_value",)
@@ -3446,7 +3577,7 @@ async def test_get_tag_template_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.GetTagTemplateRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.TagTemplate)
@@ -3456,6 +3587,11 @@ async def test_get_tag_template_async(transport: str = "grpc_asyncio"):
assert response.display_name == "display_name_value"
+@pytest.mark.asyncio
+async def test_get_tag_template_async_from_dict():
+ await test_get_tag_template_async(request_type=dict)
+
+
def test_get_tag_template_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -3465,9 +3601,7 @@ def test_get_tag_template_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_tag_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call:
call.return_value = tags.TagTemplate()
client.get_tag_template(request)
@@ -3492,9 +3626,7 @@ async def test_get_tag_template_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_tag_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate())
await client.get_tag_template(request)
@@ -3513,9 +3645,7 @@ def test_get_tag_template_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_tag_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate()
@@ -3547,9 +3677,7 @@ async def test_get_tag_template_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_tag_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate()
@@ -3591,7 +3719,7 @@ def test_update_tag_template(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_tag_template), "__call__"
+ type(client.transport.update_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate(
@@ -3607,6 +3735,7 @@ def test_update_tag_template(
assert args[0] == datacatalog.UpdateTagTemplateRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.TagTemplate)
assert response.name == "name_value"
@@ -3619,18 +3748,20 @@ def test_update_tag_template_from_dict():
@pytest.mark.asyncio
-async def test_update_tag_template_async(transport: str = "grpc_asyncio"):
+async def test_update_tag_template_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagTemplateRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.UpdateTagTemplateRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_tag_template), "__call__"
+ type(client.transport.update_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -3643,7 +3774,7 @@ async def test_update_tag_template_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.UpdateTagTemplateRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.TagTemplate)
@@ -3653,6 +3784,11 @@ async def test_update_tag_template_async(transport: str = "grpc_asyncio"):
assert response.display_name == "display_name_value"
+@pytest.mark.asyncio
+async def test_update_tag_template_async_from_dict():
+ await test_update_tag_template_async(request_type=dict)
+
+
def test_update_tag_template_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -3663,7 +3799,7 @@ def test_update_tag_template_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_tag_template), "__call__"
+ type(client.transport.update_tag_template), "__call__"
) as call:
call.return_value = tags.TagTemplate()
@@ -3693,7 +3829,7 @@ async def test_update_tag_template_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_tag_template), "__call__"
+ type(client.transport.update_tag_template), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate())
@@ -3717,7 +3853,7 @@ def test_update_tag_template_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_tag_template), "__call__"
+ type(client.transport.update_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate()
@@ -3758,7 +3894,7 @@ async def test_update_tag_template_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_tag_template), "__call__"
+ type(client.transport.update_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate()
@@ -3808,7 +3944,7 @@ def test_delete_tag_template(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_tag_template), "__call__"
+ type(client.transport.delete_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -3830,18 +3966,20 @@ def test_delete_tag_template_from_dict():
@pytest.mark.asyncio
-async def test_delete_tag_template_async(transport: str = "grpc_asyncio"):
+async def test_delete_tag_template_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagTemplateRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.DeleteTagTemplateRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_tag_template), "__call__"
+ type(client.transport.delete_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -3852,12 +3990,17 @@ async def test_delete_tag_template_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.DeleteTagTemplateRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_tag_template_async_from_dict():
+ await test_delete_tag_template_async(request_type=dict)
+
+
def test_delete_tag_template_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -3868,7 +4011,7 @@ def test_delete_tag_template_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_tag_template), "__call__"
+ type(client.transport.delete_tag_template), "__call__"
) as call:
call.return_value = None
@@ -3895,7 +4038,7 @@ async def test_delete_tag_template_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_tag_template), "__call__"
+ type(client.transport.delete_tag_template), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -3916,7 +4059,7 @@ def test_delete_tag_template_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_tag_template), "__call__"
+ type(client.transport.delete_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -3954,7 +4097,7 @@ async def test_delete_tag_template_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_tag_template), "__call__"
+ type(client.transport.delete_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -3999,7 +4142,7 @@ def test_create_tag_template_field(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_tag_template_field), "__call__"
+ type(client.transport.create_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField(
@@ -4018,6 +4161,7 @@ def test_create_tag_template_field(
assert args[0] == datacatalog.CreateTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.TagTemplateField)
assert response.name == "name_value"
@@ -4034,18 +4178,21 @@ def test_create_tag_template_field_from_dict():
@pytest.mark.asyncio
-async def test_create_tag_template_field_async(transport: str = "grpc_asyncio"):
+async def test_create_tag_template_field_async(
+ transport: str = "grpc_asyncio",
+ request_type=datacatalog.CreateTagTemplateFieldRequest,
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.CreateTagTemplateFieldRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_tag_template_field), "__call__"
+ type(client.transport.create_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -4063,7 +4210,7 @@ async def test_create_tag_template_field_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.CreateTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.TagTemplateField)
@@ -4077,6 +4224,11 @@ async def test_create_tag_template_field_async(transport: str = "grpc_asyncio"):
assert response.order == 540
+@pytest.mark.asyncio
+async def test_create_tag_template_field_async_from_dict():
+ await test_create_tag_template_field_async(request_type=dict)
+
+
def test_create_tag_template_field_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -4087,7 +4239,7 @@ def test_create_tag_template_field_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_tag_template_field), "__call__"
+ type(client.transport.create_tag_template_field), "__call__"
) as call:
call.return_value = tags.TagTemplateField()
@@ -4114,7 +4266,7 @@ async def test_create_tag_template_field_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_tag_template_field), "__call__"
+ type(client.transport.create_tag_template_field), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
tags.TagTemplateField()
@@ -4137,7 +4289,7 @@ def test_create_tag_template_field_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_tag_template_field), "__call__"
+ type(client.transport.create_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField()
@@ -4182,7 +4334,7 @@ async def test_create_tag_template_field_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_tag_template_field), "__call__"
+ type(client.transport.create_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField()
@@ -4238,7 +4390,7 @@ def test_update_tag_template_field(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_tag_template_field), "__call__"
+ type(client.transport.update_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField(
@@ -4257,6 +4409,7 @@ def test_update_tag_template_field(
assert args[0] == datacatalog.UpdateTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.TagTemplateField)
assert response.name == "name_value"
@@ -4273,18 +4426,21 @@ def test_update_tag_template_field_from_dict():
@pytest.mark.asyncio
-async def test_update_tag_template_field_async(transport: str = "grpc_asyncio"):
+async def test_update_tag_template_field_async(
+ transport: str = "grpc_asyncio",
+ request_type=datacatalog.UpdateTagTemplateFieldRequest,
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.UpdateTagTemplateFieldRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_tag_template_field), "__call__"
+ type(client.transport.update_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -4302,7 +4458,7 @@ async def test_update_tag_template_field_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.UpdateTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.TagTemplateField)
@@ -4316,6 +4472,11 @@ async def test_update_tag_template_field_async(transport: str = "grpc_asyncio"):
assert response.order == 540
+@pytest.mark.asyncio
+async def test_update_tag_template_field_async_from_dict():
+ await test_update_tag_template_field_async(request_type=dict)
+
+
def test_update_tag_template_field_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -4326,7 +4487,7 @@ def test_update_tag_template_field_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_tag_template_field), "__call__"
+ type(client.transport.update_tag_template_field), "__call__"
) as call:
call.return_value = tags.TagTemplateField()
@@ -4353,7 +4514,7 @@ async def test_update_tag_template_field_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_tag_template_field), "__call__"
+ type(client.transport.update_tag_template_field), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
tags.TagTemplateField()
@@ -4376,7 +4537,7 @@ def test_update_tag_template_field_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_tag_template_field), "__call__"
+ type(client.transport.update_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField()
@@ -4421,7 +4582,7 @@ async def test_update_tag_template_field_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_tag_template_field), "__call__"
+ type(client.transport.update_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField()
@@ -4477,7 +4638,7 @@ def test_rename_tag_template_field(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.rename_tag_template_field), "__call__"
+ type(client.transport.rename_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField(
@@ -4496,6 +4657,7 @@ def test_rename_tag_template_field(
assert args[0] == datacatalog.RenameTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.TagTemplateField)
assert response.name == "name_value"
@@ -4512,18 +4674,21 @@ def test_rename_tag_template_field_from_dict():
@pytest.mark.asyncio
-async def test_rename_tag_template_field_async(transport: str = "grpc_asyncio"):
+async def test_rename_tag_template_field_async(
+ transport: str = "grpc_asyncio",
+ request_type=datacatalog.RenameTagTemplateFieldRequest,
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.RenameTagTemplateFieldRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.rename_tag_template_field), "__call__"
+ type(client.transport.rename_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -4541,7 +4706,7 @@ async def test_rename_tag_template_field_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.RenameTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.TagTemplateField)
@@ -4555,6 +4720,11 @@ async def test_rename_tag_template_field_async(transport: str = "grpc_asyncio"):
assert response.order == 540
+@pytest.mark.asyncio
+async def test_rename_tag_template_field_async_from_dict():
+ await test_rename_tag_template_field_async(request_type=dict)
+
+
def test_rename_tag_template_field_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -4565,7 +4735,7 @@ def test_rename_tag_template_field_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.rename_tag_template_field), "__call__"
+ type(client.transport.rename_tag_template_field), "__call__"
) as call:
call.return_value = tags.TagTemplateField()
@@ -4592,7 +4762,7 @@ async def test_rename_tag_template_field_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.rename_tag_template_field), "__call__"
+ type(client.transport.rename_tag_template_field), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
tags.TagTemplateField()
@@ -4615,7 +4785,7 @@ def test_rename_tag_template_field_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.rename_tag_template_field), "__call__"
+ type(client.transport.rename_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField()
@@ -4656,7 +4826,7 @@ async def test_rename_tag_template_field_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.rename_tag_template_field), "__call__"
+ type(client.transport.rename_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField()
@@ -4708,7 +4878,7 @@ def test_delete_tag_template_field(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_tag_template_field), "__call__"
+ type(client.transport.delete_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -4730,18 +4900,21 @@ def test_delete_tag_template_field_from_dict():
@pytest.mark.asyncio
-async def test_delete_tag_template_field_async(transport: str = "grpc_asyncio"):
+async def test_delete_tag_template_field_async(
+ transport: str = "grpc_asyncio",
+ request_type=datacatalog.DeleteTagTemplateFieldRequest,
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.DeleteTagTemplateFieldRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_tag_template_field), "__call__"
+ type(client.transport.delete_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -4752,12 +4925,17 @@ async def test_delete_tag_template_field_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.DeleteTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_tag_template_field_async_from_dict():
+ await test_delete_tag_template_field_async(request_type=dict)
+
+
def test_delete_tag_template_field_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -4768,7 +4946,7 @@ def test_delete_tag_template_field_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_tag_template_field), "__call__"
+ type(client.transport.delete_tag_template_field), "__call__"
) as call:
call.return_value = None
@@ -4795,7 +4973,7 @@ async def test_delete_tag_template_field_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_tag_template_field), "__call__"
+ type(client.transport.delete_tag_template_field), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -4816,7 +4994,7 @@ def test_delete_tag_template_field_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_tag_template_field), "__call__"
+ type(client.transport.delete_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -4854,7 +5032,7 @@ async def test_delete_tag_template_field_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_tag_template_field), "__call__"
+ type(client.transport.delete_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -4898,7 +5076,7 @@ def test_create_tag(transport: str = "grpc", request_type=datacatalog.CreateTagR
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.Tag(
name="name_value",
@@ -4916,6 +5094,7 @@ def test_create_tag(transport: str = "grpc", request_type=datacatalog.CreateTagR
assert args[0] == datacatalog.CreateTagRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.Tag)
assert response.name == "name_value"
@@ -4930,19 +5109,19 @@ def test_create_tag_from_dict():
@pytest.mark.asyncio
-async def test_create_tag_async(transport: str = "grpc_asyncio"):
+async def test_create_tag_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.CreateTagRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
tags.Tag(
@@ -4958,7 +5137,7 @@ async def test_create_tag_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.CreateTagRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.Tag)
@@ -4970,6 +5149,11 @@ async def test_create_tag_async(transport: str = "grpc_asyncio"):
assert response.template_display_name == "template_display_name_value"
+@pytest.mark.asyncio
+async def test_create_tag_async_from_dict():
+ await test_create_tag_async(request_type=dict)
+
+
def test_create_tag_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -4979,7 +5163,7 @@ def test_create_tag_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_tag), "__call__") as call:
call.return_value = tags.Tag()
client.create_tag(request)
@@ -5004,9 +5188,7 @@ async def test_create_tag_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_tag), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag())
await client.create_tag(request)
@@ -5025,7 +5207,7 @@ def test_create_tag_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.Tag()
@@ -5063,9 +5245,7 @@ async def test_create_tag_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.Tag()
@@ -5110,7 +5290,7 @@ def test_update_tag(transport: str = "grpc", request_type=datacatalog.UpdateTagR
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.Tag(
name="name_value",
@@ -5128,6 +5308,7 @@ def test_update_tag(transport: str = "grpc", request_type=datacatalog.UpdateTagR
assert args[0] == datacatalog.UpdateTagRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.Tag)
assert response.name == "name_value"
@@ -5142,19 +5323,19 @@ def test_update_tag_from_dict():
@pytest.mark.asyncio
-async def test_update_tag_async(transport: str = "grpc_asyncio"):
+async def test_update_tag_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.UpdateTagRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
tags.Tag(
@@ -5170,7 +5351,7 @@ async def test_update_tag_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.UpdateTagRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.Tag)
@@ -5182,6 +5363,11 @@ async def test_update_tag_async(transport: str = "grpc_asyncio"):
assert response.template_display_name == "template_display_name_value"
+@pytest.mark.asyncio
+async def test_update_tag_async_from_dict():
+ await test_update_tag_async(request_type=dict)
+
+
def test_update_tag_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -5191,7 +5377,7 @@ def test_update_tag_field_headers():
request.tag.name = "tag.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_tag), "__call__") as call:
call.return_value = tags.Tag()
client.update_tag(request)
@@ -5216,9 +5402,7 @@ async def test_update_tag_field_headers_async():
request.tag.name = "tag.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_tag), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag())
await client.update_tag(request)
@@ -5237,7 +5421,7 @@ def test_update_tag_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.Tag()
@@ -5276,9 +5460,7 @@ async def test_update_tag_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.Tag()
@@ -5324,7 +5506,7 @@ def test_delete_tag(transport: str = "grpc", request_type=datacatalog.DeleteTagR
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -5345,19 +5527,19 @@ def test_delete_tag_from_dict():
@pytest.mark.asyncio
-async def test_delete_tag_async(transport: str = "grpc_asyncio"):
+async def test_delete_tag_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.DeleteTagRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -5367,12 +5549,17 @@ async def test_delete_tag_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.DeleteTagRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_tag_async_from_dict():
+ await test_delete_tag_async(request_type=dict)
+
+
def test_delete_tag_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -5382,7 +5569,7 @@ def test_delete_tag_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_tag), "__call__") as call:
call.return_value = None
client.delete_tag(request)
@@ -5407,9 +5594,7 @@ async def test_delete_tag_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_tag), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_tag(request)
@@ -5428,7 +5613,7 @@ def test_delete_tag_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -5460,9 +5645,7 @@ async def test_delete_tag_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -5501,7 +5684,7 @@ def test_list_tags(transport: str = "grpc", request_type=datacatalog.ListTagsReq
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListTagsResponse(
next_page_token="next_page_token_value",
@@ -5516,6 +5699,7 @@ def test_list_tags(transport: str = "grpc", request_type=datacatalog.ListTagsReq
assert args[0] == datacatalog.ListTagsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListTagsPager)
assert response.next_page_token == "next_page_token_value"
@@ -5526,19 +5710,19 @@ def test_list_tags_from_dict():
@pytest.mark.asyncio
-async def test_list_tags_async(transport: str = "grpc_asyncio"):
+async def test_list_tags_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.ListTagsRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.ListTagsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.ListTagsResponse(next_page_token="next_page_token_value",)
@@ -5550,7 +5734,7 @@ async def test_list_tags_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.ListTagsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListTagsAsyncPager)
@@ -5558,6 +5742,11 @@ async def test_list_tags_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_tags_async_from_dict():
+ await test_list_tags_async(request_type=dict)
+
+
def test_list_tags_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -5567,7 +5756,7 @@ def test_list_tags_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
call.return_value = datacatalog.ListTagsResponse()
client.list_tags(request)
@@ -5592,9 +5781,7 @@ async def test_list_tags_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.ListTagsResponse()
)
@@ -5615,7 +5802,7 @@ def test_list_tags_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListTagsResponse()
@@ -5647,9 +5834,7 @@ async def test_list_tags_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListTagsResponse()
@@ -5684,7 +5869,7 @@ def test_list_tags_pager():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
datacatalog.ListTagsResponse(
@@ -5713,7 +5898,7 @@ def test_list_tags_pages():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
datacatalog.ListTagsResponse(
@@ -5725,8 +5910,8 @@ def test_list_tags_pages():
RuntimeError,
)
pages = list(client.list_tags(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -5735,9 +5920,7 @@ async def test_list_tags_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_tags),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -5765,9 +5948,7 @@ async def test_list_tags_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_tags),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -5780,10 +5961,10 @@ async def test_list_tags_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.list_tags(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.list_tags(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
def test_set_iam_policy(
@@ -5798,7 +5979,7 @@ def test_set_iam_policy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
@@ -5811,6 +5992,7 @@ def test_set_iam_policy(
assert args[0] == iam_policy.SetIamPolicyRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policy.Policy)
assert response.version == 774
@@ -5823,19 +6005,19 @@ def test_set_iam_policy_from_dict():
@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+async def test_set_iam_policy_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.SetIamPolicyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy.Policy(version=774, etag=b"etag_blob",)
@@ -5847,7 +6029,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.SetIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
@@ -5857,6 +6039,11 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_set_iam_policy_async_from_dict():
+ await test_set_iam_policy_async(request_type=dict)
+
+
def test_set_iam_policy_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -5866,7 +6053,7 @@ def test_set_iam_policy_field_headers():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
call.return_value = policy.Policy()
client.set_iam_policy(request)
@@ -5891,9 +6078,7 @@ async def test_set_iam_policy_field_headers_async():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
await client.set_iam_policy(request)
@@ -5908,10 +6093,10 @@ async def test_set_iam_policy_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_set_iam_policy_from_dict():
+def test_set_iam_policy_from_dict_foreign():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -5928,7 +6113,7 @@ def test_set_iam_policy_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -5960,9 +6145,7 @@ async def test_set_iam_policy_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -6003,7 +6186,7 @@ def test_get_iam_policy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
@@ -6016,6 +6199,7 @@ def test_get_iam_policy(
assert args[0] == iam_policy.GetIamPolicyRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policy.Policy)
assert response.version == 774
@@ -6028,19 +6212,19 @@ def test_get_iam_policy_from_dict():
@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
+async def test_get_iam_policy_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.GetIamPolicyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy.Policy(version=774, etag=b"etag_blob",)
@@ -6052,7 +6236,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.GetIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
@@ -6062,6 +6246,11 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_from_dict():
+ await test_get_iam_policy_async(request_type=dict)
+
+
def test_get_iam_policy_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -6071,7 +6260,7 @@ def test_get_iam_policy_field_headers():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
call.return_value = policy.Policy()
client.get_iam_policy(request)
@@ -6096,9 +6285,7 @@ async def test_get_iam_policy_field_headers_async():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
await client.get_iam_policy(request)
@@ -6113,10 +6300,10 @@ async def test_get_iam_policy_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_get_iam_policy_from_dict():
+def test_get_iam_policy_from_dict_foreign():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -6133,7 +6320,7 @@ def test_get_iam_policy_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -6165,9 +6352,7 @@ async def test_get_iam_policy_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -6209,7 +6394,7 @@ def test_test_iam_permissions(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse(
@@ -6225,6 +6410,7 @@ def test_test_iam_permissions(
assert args[0] == iam_policy.TestIamPermissionsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, iam_policy.TestIamPermissionsResponse)
assert response.permissions == ["permissions_value"]
@@ -6235,18 +6421,20 @@ def test_test_iam_permissions_from_dict():
@pytest.mark.asyncio
-async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
+async def test_test_iam_permissions_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.TestIamPermissionsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -6259,7 +6447,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.TestIamPermissionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, iam_policy.TestIamPermissionsResponse)
@@ -6267,6 +6455,11 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
assert response.permissions == ["permissions_value"]
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_from_dict():
+ await test_test_iam_permissions_async(request_type=dict)
+
+
def test_test_iam_permissions_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -6277,7 +6470,7 @@ def test_test_iam_permissions_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -6304,7 +6497,7 @@ async def test_test_iam_permissions_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
iam_policy.TestIamPermissionsResponse()
@@ -6322,11 +6515,11 @@ async def test_test_iam_permissions_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_test_iam_permissions_from_dict():
+def test_test_iam_permissions_from_dict_foreign():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -6376,7 +6569,7 @@ def test_transport_instance():
credentials=credentials.AnonymousCredentials(),
)
client = DataCatalogClient(transport=transport)
- assert client._transport is transport
+ assert client.transport is transport
def test_transport_get_channel():
@@ -6394,10 +6587,22 @@ def test_transport_get_channel():
assert channel
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
- assert isinstance(client._transport, transports.DataCatalogGrpcTransport,)
+ assert isinstance(client.transport, transports.DataCatalogGrpcTransport,)
def test_data_catalog_base_transport_error():
@@ -6474,6 +6679,17 @@ def test_data_catalog_base_transport_with_credentials_file():
)
+def test_data_catalog_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.datacatalog_v1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.DataCatalogTransport()
+ adc.assert_called_once()
+
+
def test_data_catalog_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(auth, "default") as adc:
@@ -6506,7 +6722,7 @@ def test_data_catalog_host_no_port():
api_endpoint="datacatalog.googleapis.com"
),
)
- assert client._transport._host == "datacatalog.googleapis.com:443"
+ assert client.transport._host == "datacatalog.googleapis.com:443"
def test_data_catalog_host_with_port():
@@ -6516,213 +6732,195 @@ def test_data_catalog_host_with_port():
api_endpoint="datacatalog.googleapis.com:8000"
),
)
- assert client._transport._host == "datacatalog.googleapis.com:8000"
+ assert client.transport._host == "datacatalog.googleapis.com:8000"
def test_data_catalog_grpc_transport_channel():
channel = grpc.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.DataCatalogGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
+ assert transport._ssl_channel_credentials == None
def test_data_catalog_grpc_asyncio_transport_channel():
channel = aio.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.DataCatalogGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_data_catalog_grpc_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
-
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- transport = transports.DataCatalogGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_data_catalog_grpc_asyncio_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
-
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- transport = transports.DataCatalogGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == None
@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+ "transport_class",
+ [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport],
)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_data_catalog_grpc_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
-):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- # Mock google.auth.transport.grpc.SslCredentials class.
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- mock_cred = mock.Mock()
- transport = transports.DataCatalogGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+def test_data_catalog_transport_channel_mtls_with_client_cert_source(transport_class):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+ "transport_class",
+ [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport],
)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_data_catalog_grpc_asyncio_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
-):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- # Mock google.auth.transport.grpc.SslCredentials class.
+def test_data_catalog_transport_channel_mtls_with_adc(transport_class):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
- mock_cred = mock.Mock()
- transport = transports.DataCatalogGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
-def test_tag_template_field_path():
+
+def test_entry_path():
project = "squid"
location = "clam"
- tag_template = "whelk"
- field = "octopus"
+ entry_group = "whelk"
+ entry = "octopus"
- expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format(
- project=project, location=location, tag_template=tag_template, field=field,
- )
- actual = DataCatalogClient.tag_template_field_path(
- project, location, tag_template, field
+ expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(
+ project=project, location=location, entry_group=entry_group, entry=entry,
)
+ actual = DataCatalogClient.entry_path(project, location, entry_group, entry)
assert expected == actual
-def test_parse_tag_template_field_path():
+def test_parse_entry_path():
expected = {
"project": "oyster",
"location": "nudibranch",
- "tag_template": "cuttlefish",
- "field": "mussel",
+ "entry_group": "cuttlefish",
+ "entry": "mussel",
}
- path = DataCatalogClient.tag_template_field_path(**expected)
+ path = DataCatalogClient.entry_path(**expected)
# Check that the path construction is reversible.
- actual = DataCatalogClient.parse_tag_template_field_path(path)
+ actual = DataCatalogClient.parse_entry_path(path)
+ assert expected == actual
+
+
+def test_entry_group_path():
+ project = "winkle"
+ location = "nautilus"
+ entry_group = "scallop"
+
+ expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(
+ project=project, location=location, entry_group=entry_group,
+ )
+ actual = DataCatalogClient.entry_group_path(project, location, entry_group)
+ assert expected == actual
+
+
+def test_parse_entry_group_path():
+ expected = {
+ "project": "abalone",
+ "location": "squid",
+ "entry_group": "clam",
+ }
+ path = DataCatalogClient.entry_group_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataCatalogClient.parse_entry_group_path(path)
+ assert expected == actual
+
+
+def test_tag_path():
+ project = "whelk"
+ location = "octopus"
+ entry_group = "oyster"
+ entry = "nudibranch"
+ tag = "cuttlefish"
+
+ expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format(
+ project=project,
+ location=location,
+ entry_group=entry_group,
+ entry=entry,
+ tag=tag,
+ )
+ actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag)
+ assert expected == actual
+
+
+def test_parse_tag_path():
+ expected = {
+ "project": "mussel",
+ "location": "winkle",
+ "entry_group": "nautilus",
+ "entry": "scallop",
+ "tag": "abalone",
+ }
+ path = DataCatalogClient.tag_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataCatalogClient.parse_tag_path(path)
assert expected == actual
@@ -6751,86 +6949,152 @@ def test_parse_tag_template_path():
assert expected == actual
-def test_entry_path():
- project = "squid"
- location = "clam"
- entry_group = "whelk"
- entry = "octopus"
+def test_tag_template_field_path():
+ project = "cuttlefish"
+ location = "mussel"
+ tag_template = "winkle"
+ field = "nautilus"
- expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}".format(
- project=project, location=location, entry_group=entry_group, entry=entry,
+ expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format(
+ project=project, location=location, tag_template=tag_template, field=field,
+ )
+ actual = DataCatalogClient.tag_template_field_path(
+ project, location, tag_template, field
)
- actual = DataCatalogClient.entry_path(project, location, entry_group, entry)
assert expected == actual
-def test_parse_entry_path():
+def test_parse_tag_template_field_path():
expected = {
- "project": "oyster",
- "location": "nudibranch",
- "entry_group": "cuttlefish",
- "entry": "mussel",
+ "project": "scallop",
+ "location": "abalone",
+ "tag_template": "squid",
+ "field": "clam",
}
- path = DataCatalogClient.entry_path(**expected)
+ path = DataCatalogClient.tag_template_field_path(**expected)
# Check that the path construction is reversible.
- actual = DataCatalogClient.parse_entry_path(path)
+ actual = DataCatalogClient.parse_tag_template_field_path(path)
assert expected == actual
-def test_tag_path():
- project = "squid"
- location = "clam"
- entry_group = "whelk"
- entry = "octopus"
- tag = "oyster"
+def test_common_billing_account_path():
+ billing_account = "whelk"
- expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format(
- project=project,
- location=location,
- entry_group=entry_group,
- entry=entry,
- tag=tag,
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
)
- actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag)
+ actual = DataCatalogClient.common_billing_account_path(billing_account)
assert expected == actual
-def test_parse_tag_path():
+def test_parse_common_billing_account_path():
expected = {
- "project": "nudibranch",
- "location": "cuttlefish",
- "entry_group": "mussel",
- "entry": "winkle",
- "tag": "nautilus",
+ "billing_account": "octopus",
}
- path = DataCatalogClient.tag_path(**expected)
+ path = DataCatalogClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
- actual = DataCatalogClient.parse_tag_path(path)
+ actual = DataCatalogClient.parse_common_billing_account_path(path)
assert expected == actual
-def test_entry_group_path():
- project = "squid"
- location = "clam"
- entry_group = "whelk"
+def test_common_folder_path():
+ folder = "oyster"
- expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(
- project=project, location=location, entry_group=entry_group,
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = DataCatalogClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "nudibranch",
+ }
+ path = DataCatalogClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataCatalogClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "cuttlefish"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = DataCatalogClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "mussel",
+ }
+ path = DataCatalogClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataCatalogClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "winkle"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = DataCatalogClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "nautilus",
+ }
+ path = DataCatalogClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataCatalogClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "scallop"
+ location = "abalone"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
)
- actual = DataCatalogClient.entry_group_path(project, location, entry_group)
+ actual = DataCatalogClient.common_location_path(project, location)
assert expected == actual
-def test_parse_entry_group_path():
+def test_parse_common_location_path():
expected = {
- "project": "octopus",
- "location": "oyster",
- "entry_group": "nudibranch",
+ "project": "squid",
+ "location": "clam",
}
- path = DataCatalogClient.entry_group_path(**expected)
+ path = DataCatalogClient.common_location_path(**expected)
# Check that the path construction is reversible.
- actual = DataCatalogClient.parse_entry_group_path(path)
+ actual = DataCatalogClient.parse_common_location_path(path)
assert expected == actual
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.DataCatalogTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = DataCatalogClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.DataCatalogTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = DataCatalogClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py b/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py
index 427f8b6b..57088c0c 100644
--- a/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py
+++ b/tests/unit/gapic/datacatalog_v1beta1/test_data_catalog.py
@@ -41,14 +41,9 @@
from google.cloud.datacatalog_v1beta1.types import common
from google.cloud.datacatalog_v1beta1.types import datacatalog
from google.cloud.datacatalog_v1beta1.types import gcs_fileset_spec
-from google.cloud.datacatalog_v1beta1.types import (
- gcs_fileset_spec as gcd_gcs_fileset_spec,
-)
from google.cloud.datacatalog_v1beta1.types import schema
-from google.cloud.datacatalog_v1beta1.types import schema as gcd_schema
from google.cloud.datacatalog_v1beta1.types import search
from google.cloud.datacatalog_v1beta1.types import table_spec
-from google.cloud.datacatalog_v1beta1.types import table_spec as gcd_table_spec
from google.cloud.datacatalog_v1beta1.types import tags
from google.cloud.datacatalog_v1beta1.types import timestamps
from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
@@ -109,12 +104,12 @@ def test_data_catalog_client_from_service_account_file(client_class):
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
client = client_class.from_service_account_json("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
- assert client._transport._host == "datacatalog.googleapis.com:443"
+ assert client.transport._host == "datacatalog.googleapis.com:443"
def test_data_catalog_client_get_transport_class():
@@ -168,14 +163,14 @@ def test_data_catalog_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -184,14 +179,14 @@ def test_data_catalog_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -200,90 +195,173 @@ def test_data_catalog_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "true"),
+ (
+ DataCatalogAsyncClient,
+ transports.DataCatalogGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (DataCatalogClient, transports.DataCatalogGrpcTransport, "grpc", "false"),
+ (
+ DataCatalogAsyncClient,
+ transports.DataCatalogGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ DataCatalogClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DataCatalogClient)
+)
+@mock.patch.object(
+ DataCatalogAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(DataCatalogAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_data_catalog_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=client_cert_source_callback,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and default_client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
- with mock.patch.object(transport_class, "__init__") as patched:
+ ssl_channel_creds = mock.Mock()
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=True,
+ "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
):
patched.return_value = None
- client = client_class()
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_ssl_channel_creds = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_ssl_channel_creds = ssl_channel_creds
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
+ host=expected_host,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", but client_cert_source and default_client_cert_source are None.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=False,
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
):
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has
- # unsupported value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError):
- client = client_class()
-
- # Check the case quota_project_id is provided
- options = client_options.ClientOptions(quota_project_id="octopus")
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id="octopus",
- )
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.ssl_credentials",
+ new_callable=mock.PropertyMock,
+ ) as ssl_credentials_mock:
+ if use_client_cert_env == "false":
+ is_mtls_mock.return_value = False
+ ssl_credentials_mock.return_value = None
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_ssl_channel_creds = None
+ else:
+ is_mtls_mock.return_value = True
+ ssl_credentials_mock.return_value = mock.Mock()
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_ssl_channel_creds = (
+ ssl_credentials_mock.return_value
+ )
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ ):
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ is_mtls_mock.return_value = False
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
@pytest.mark.parametrize(
@@ -310,9 +388,9 @@ def test_data_catalog_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -340,9 +418,9 @@ def test_data_catalog_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -357,9 +435,9 @@ def test_data_catalog_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -375,7 +453,7 @@ def test_search_catalog(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.search_catalog), "__call__") as call:
+ with mock.patch.object(type(client.transport.search_catalog), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.SearchCatalogResponse(
next_page_token="next_page_token_value",
@@ -390,6 +468,7 @@ def test_search_catalog(
assert args[0] == datacatalog.SearchCatalogRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.SearchCatalogPager)
assert response.next_page_token == "next_page_token_value"
@@ -400,19 +479,19 @@ def test_search_catalog_from_dict():
@pytest.mark.asyncio
-async def test_search_catalog_async(transport: str = "grpc_asyncio"):
+async def test_search_catalog_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.SearchCatalogRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.SearchCatalogRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.search_catalog), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.search_catalog), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.SearchCatalogResponse(next_page_token="next_page_token_value",)
@@ -424,7 +503,7 @@ async def test_search_catalog_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.SearchCatalogRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.SearchCatalogAsyncPager)
@@ -432,11 +511,16 @@ async def test_search_catalog_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_search_catalog_async_from_dict():
+ await test_search_catalog_async(request_type=dict)
+
+
def test_search_catalog_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.search_catalog), "__call__") as call:
+ with mock.patch.object(type(client.transport.search_catalog), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.SearchCatalogResponse()
@@ -481,9 +565,7 @@ async def test_search_catalog_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.search_catalog), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.search_catalog), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.SearchCatalogResponse()
@@ -531,7 +613,7 @@ def test_search_catalog_pager():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.search_catalog), "__call__") as call:
+ with mock.patch.object(type(client.transport.search_catalog), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
datacatalog.SearchCatalogResponse(
@@ -566,7 +648,7 @@ def test_search_catalog_pages():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.search_catalog), "__call__") as call:
+ with mock.patch.object(type(client.transport.search_catalog), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
datacatalog.SearchCatalogResponse(
@@ -587,8 +669,8 @@ def test_search_catalog_pages():
RuntimeError,
)
pages = list(client.search_catalog(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -597,9 +679,7 @@ async def test_search_catalog_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.search_catalog),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.search_catalog), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -636,9 +716,7 @@ async def test_search_catalog_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.search_catalog),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.search_catalog), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -660,10 +738,10 @@ async def test_search_catalog_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.search_catalog(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.search_catalog(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
def test_create_entry_group(
@@ -679,7 +757,7 @@ def test_create_entry_group(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_entry_group), "__call__"
+ type(client.transport.create_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup(
@@ -697,6 +775,7 @@ def test_create_entry_group(
assert args[0] == datacatalog.CreateEntryGroupRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.EntryGroup)
assert response.name == "name_value"
@@ -711,18 +790,20 @@ def test_create_entry_group_from_dict():
@pytest.mark.asyncio
-async def test_create_entry_group_async(transport: str = "grpc_asyncio"):
+async def test_create_entry_group_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryGroupRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.CreateEntryGroupRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_entry_group), "__call__"
+ type(client.transport.create_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -739,7 +820,7 @@ async def test_create_entry_group_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.CreateEntryGroupRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.EntryGroup)
@@ -751,6 +832,11 @@ async def test_create_entry_group_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_create_entry_group_async_from_dict():
+ await test_create_entry_group_async(request_type=dict)
+
+
def test_create_entry_group_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -761,7 +847,7 @@ def test_create_entry_group_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_entry_group), "__call__"
+ type(client.transport.create_entry_group), "__call__"
) as call:
call.return_value = datacatalog.EntryGroup()
@@ -788,7 +874,7 @@ async def test_create_entry_group_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_entry_group), "__call__"
+ type(client.transport.create_entry_group), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.EntryGroup()
@@ -811,7 +897,7 @@ def test_create_entry_group_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_entry_group), "__call__"
+ type(client.transport.create_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup()
@@ -856,7 +942,7 @@ async def test_create_entry_group_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_entry_group), "__call__"
+ type(client.transport.create_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup()
@@ -912,7 +998,7 @@ def test_update_entry_group(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_entry_group), "__call__"
+ type(client.transport.update_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup(
@@ -930,6 +1016,7 @@ def test_update_entry_group(
assert args[0] == datacatalog.UpdateEntryGroupRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.EntryGroup)
assert response.name == "name_value"
@@ -944,18 +1031,20 @@ def test_update_entry_group_from_dict():
@pytest.mark.asyncio
-async def test_update_entry_group_async(transport: str = "grpc_asyncio"):
+async def test_update_entry_group_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryGroupRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.UpdateEntryGroupRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_entry_group), "__call__"
+ type(client.transport.update_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -972,7 +1061,7 @@ async def test_update_entry_group_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.UpdateEntryGroupRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.EntryGroup)
@@ -984,6 +1073,11 @@ async def test_update_entry_group_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_update_entry_group_async_from_dict():
+ await test_update_entry_group_async(request_type=dict)
+
+
def test_update_entry_group_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -994,7 +1088,7 @@ def test_update_entry_group_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_entry_group), "__call__"
+ type(client.transport.update_entry_group), "__call__"
) as call:
call.return_value = datacatalog.EntryGroup()
@@ -1023,7 +1117,7 @@ async def test_update_entry_group_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_entry_group), "__call__"
+ type(client.transport.update_entry_group), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.EntryGroup()
@@ -1048,7 +1142,7 @@ def test_update_entry_group_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_entry_group), "__call__"
+ type(client.transport.update_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup()
@@ -1089,7 +1183,7 @@ async def test_update_entry_group_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_entry_group), "__call__"
+ type(client.transport.update_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup()
@@ -1140,7 +1234,7 @@ def test_get_entry_group(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_entry_group), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup(
name="name_value",
@@ -1157,6 +1251,7 @@ def test_get_entry_group(
assert args[0] == datacatalog.GetEntryGroupRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.EntryGroup)
assert response.name == "name_value"
@@ -1171,19 +1266,19 @@ def test_get_entry_group_from_dict():
@pytest.mark.asyncio
-async def test_get_entry_group_async(transport: str = "grpc_asyncio"):
+async def test_get_entry_group_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryGroupRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.GetEntryGroupRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_entry_group), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.EntryGroup(
@@ -1199,7 +1294,7 @@ async def test_get_entry_group_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.GetEntryGroupRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.EntryGroup)
@@ -1211,6 +1306,11 @@ async def test_get_entry_group_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_get_entry_group_async_from_dict():
+ await test_get_entry_group_async(request_type=dict)
+
+
def test_get_entry_group_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -1220,7 +1320,7 @@ def test_get_entry_group_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_entry_group), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
call.return_value = datacatalog.EntryGroup()
client.get_entry_group(request)
@@ -1245,9 +1345,7 @@ async def test_get_entry_group_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_entry_group), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.EntryGroup()
)
@@ -1268,7 +1366,7 @@ def test_get_entry_group_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_entry_group), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup()
@@ -1306,9 +1404,7 @@ async def test_get_entry_group_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_entry_group), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_entry_group), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.EntryGroup()
@@ -1358,7 +1454,7 @@ def test_delete_entry_group(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_entry_group), "__call__"
+ type(client.transport.delete_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1380,18 +1476,20 @@ def test_delete_entry_group_from_dict():
@pytest.mark.asyncio
-async def test_delete_entry_group_async(transport: str = "grpc_asyncio"):
+async def test_delete_entry_group_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryGroupRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.DeleteEntryGroupRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_entry_group), "__call__"
+ type(client.transport.delete_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -1402,12 +1500,17 @@ async def test_delete_entry_group_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.DeleteEntryGroupRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_entry_group_async_from_dict():
+ await test_delete_entry_group_async(request_type=dict)
+
+
def test_delete_entry_group_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -1418,7 +1521,7 @@ def test_delete_entry_group_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_entry_group), "__call__"
+ type(client.transport.delete_entry_group), "__call__"
) as call:
call.return_value = None
@@ -1445,7 +1548,7 @@ async def test_delete_entry_group_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_entry_group), "__call__"
+ type(client.transport.delete_entry_group), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -1466,7 +1569,7 @@ def test_delete_entry_group_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_entry_group), "__call__"
+ type(client.transport.delete_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1500,7 +1603,7 @@ async def test_delete_entry_group_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_entry_group), "__call__"
+ type(client.transport.delete_entry_group), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1543,7 +1646,7 @@ def test_list_entry_groups(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListEntryGroupsResponse(
@@ -1559,6 +1662,7 @@ def test_list_entry_groups(
assert args[0] == datacatalog.ListEntryGroupsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListEntryGroupsPager)
assert response.next_page_token == "next_page_token_value"
@@ -1569,18 +1673,20 @@ def test_list_entry_groups_from_dict():
@pytest.mark.asyncio
-async def test_list_entry_groups_async(transport: str = "grpc_asyncio"):
+async def test_list_entry_groups_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.ListEntryGroupsRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.ListEntryGroupsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1595,7 +1701,7 @@ async def test_list_entry_groups_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.ListEntryGroupsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListEntryGroupsAsyncPager)
@@ -1603,6 +1709,11 @@ async def test_list_entry_groups_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_entry_groups_async_from_dict():
+ await test_list_entry_groups_async(request_type=dict)
+
+
def test_list_entry_groups_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -1613,7 +1724,7 @@ def test_list_entry_groups_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
call.return_value = datacatalog.ListEntryGroupsResponse()
@@ -1640,7 +1751,7 @@ async def test_list_entry_groups_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.ListEntryGroupsResponse()
@@ -1663,7 +1774,7 @@ def test_list_entry_groups_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListEntryGroupsResponse()
@@ -1697,7 +1808,7 @@ async def test_list_entry_groups_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListEntryGroupsResponse()
@@ -1734,7 +1845,7 @@ def test_list_entry_groups_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1776,7 +1887,7 @@ def test_list_entry_groups_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_entry_groups), "__call__"
+ type(client.transport.list_entry_groups), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1800,8 +1911,8 @@ def test_list_entry_groups_pages():
RuntimeError,
)
pages = list(client.list_entry_groups(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -1810,7 +1921,7 @@ async def test_list_entry_groups_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entry_groups),
+ type(client.transport.list_entry_groups),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -1851,7 +1962,7 @@ async def test_list_entry_groups_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entry_groups),
+ type(client.transport.list_entry_groups),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -1877,10 +1988,10 @@ async def test_list_entry_groups_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.list_entry_groups(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.list_entry_groups(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
def test_create_entry(
@@ -1895,14 +2006,14 @@ def test_create_entry(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry(
name="name_value",
linked_resource="linked_resource_value",
display_name="display_name_value",
description="description_value",
- type=datacatalog.EntryType.TABLE,
+ type_=datacatalog.EntryType.TABLE,
integrated_system=common.IntegratedSystem.BIGQUERY,
gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec(
file_patterns=["file_patterns_value"]
@@ -1918,6 +2029,7 @@ def test_create_entry(
assert args[0] == datacatalog.CreateEntryRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.Entry)
assert response.name == "name_value"
@@ -1934,19 +2046,19 @@ def test_create_entry_from_dict():
@pytest.mark.asyncio
-async def test_create_entry_async(transport: str = "grpc_asyncio"):
+async def test_create_entry_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.CreateEntryRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.CreateEntryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.Entry(
@@ -1963,7 +2075,7 @@ async def test_create_entry_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.CreateEntryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.Entry)
@@ -1977,6 +2089,11 @@ async def test_create_entry_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_create_entry_async_from_dict():
+ await test_create_entry_async(request_type=dict)
+
+
def test_create_entry_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -1986,7 +2103,7 @@ def test_create_entry_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
call.return_value = datacatalog.Entry()
client.create_entry(request)
@@ -2011,9 +2128,7 @@ async def test_create_entry_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry())
await client.create_entry(request)
@@ -2032,7 +2147,7 @@ def test_create_entry_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry()
@@ -2075,9 +2190,7 @@ async def test_create_entry_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry()
@@ -2129,14 +2242,14 @@ def test_update_entry(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry(
name="name_value",
linked_resource="linked_resource_value",
display_name="display_name_value",
description="description_value",
- type=datacatalog.EntryType.TABLE,
+ type_=datacatalog.EntryType.TABLE,
integrated_system=common.IntegratedSystem.BIGQUERY,
gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec(
file_patterns=["file_patterns_value"]
@@ -2152,6 +2265,7 @@ def test_update_entry(
assert args[0] == datacatalog.UpdateEntryRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.Entry)
assert response.name == "name_value"
@@ -2168,19 +2282,19 @@ def test_update_entry_from_dict():
@pytest.mark.asyncio
-async def test_update_entry_async(transport: str = "grpc_asyncio"):
+async def test_update_entry_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.UpdateEntryRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.UpdateEntryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.Entry(
@@ -2197,7 +2311,7 @@ async def test_update_entry_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.UpdateEntryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.Entry)
@@ -2211,6 +2325,11 @@ async def test_update_entry_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_update_entry_async_from_dict():
+ await test_update_entry_async(request_type=dict)
+
+
def test_update_entry_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -2220,7 +2339,7 @@ def test_update_entry_field_headers():
request.entry.name = "entry.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
call.return_value = datacatalog.Entry()
client.update_entry(request)
@@ -2245,9 +2364,7 @@ async def test_update_entry_field_headers_async():
request.entry.name = "entry.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry())
await client.update_entry(request)
@@ -2266,7 +2383,7 @@ def test_update_entry_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry()
@@ -2305,9 +2422,7 @@ async def test_update_entry_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry()
@@ -2355,7 +2470,7 @@ def test_delete_entry(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -2376,19 +2491,19 @@ def test_delete_entry_from_dict():
@pytest.mark.asyncio
-async def test_delete_entry_async(transport: str = "grpc_asyncio"):
+async def test_delete_entry_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.DeleteEntryRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.DeleteEntryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -2398,12 +2513,17 @@ async def test_delete_entry_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.DeleteEntryRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_entry_async_from_dict():
+ await test_delete_entry_async(request_type=dict)
+
+
def test_delete_entry_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -2413,7 +2533,7 @@ def test_delete_entry_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
call.return_value = None
client.delete_entry(request)
@@ -2438,9 +2558,7 @@ async def test_delete_entry_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_entry(request)
@@ -2459,7 +2577,7 @@ def test_delete_entry_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -2491,9 +2609,7 @@ async def test_delete_entry_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -2532,14 +2648,14 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry(
name="name_value",
linked_resource="linked_resource_value",
display_name="display_name_value",
description="description_value",
- type=datacatalog.EntryType.TABLE,
+ type_=datacatalog.EntryType.TABLE,
integrated_system=common.IntegratedSystem.BIGQUERY,
gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec(
file_patterns=["file_patterns_value"]
@@ -2555,6 +2671,7 @@ def test_get_entry(transport: str = "grpc", request_type=datacatalog.GetEntryReq
assert args[0] == datacatalog.GetEntryRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.Entry)
assert response.name == "name_value"
@@ -2571,19 +2688,19 @@ def test_get_entry_from_dict():
@pytest.mark.asyncio
-async def test_get_entry_async(transport: str = "grpc_asyncio"):
+async def test_get_entry_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.GetEntryRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.GetEntryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.Entry(
@@ -2600,7 +2717,7 @@ async def test_get_entry_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.GetEntryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.Entry)
@@ -2614,6 +2731,11 @@ async def test_get_entry_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_get_entry_async_from_dict():
+ await test_get_entry_async(request_type=dict)
+
+
def test_get_entry_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -2623,7 +2745,7 @@ def test_get_entry_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
call.return_value = datacatalog.Entry()
client.get_entry(request)
@@ -2648,9 +2770,7 @@ async def test_get_entry_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(datacatalog.Entry())
await client.get_entry(request)
@@ -2669,7 +2789,7 @@ def test_get_entry_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry()
@@ -2701,9 +2821,7 @@ async def test_get_entry_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry()
@@ -2744,14 +2862,14 @@ def test_lookup_entry(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.lookup_entry), "__call__") as call:
+ with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.Entry(
name="name_value",
linked_resource="linked_resource_value",
display_name="display_name_value",
description="description_value",
- type=datacatalog.EntryType.TABLE,
+ type_=datacatalog.EntryType.TABLE,
integrated_system=common.IntegratedSystem.BIGQUERY,
gcs_fileset_spec=gcs_fileset_spec.GcsFilesetSpec(
file_patterns=["file_patterns_value"]
@@ -2767,6 +2885,7 @@ def test_lookup_entry(
assert args[0] == datacatalog.LookupEntryRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, datacatalog.Entry)
assert response.name == "name_value"
@@ -2783,19 +2902,19 @@ def test_lookup_entry_from_dict():
@pytest.mark.asyncio
-async def test_lookup_entry_async(transport: str = "grpc_asyncio"):
+async def test_lookup_entry_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.LookupEntryRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.LookupEntryRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.lookup_entry), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.lookup_entry), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.Entry(
@@ -2812,7 +2931,7 @@ async def test_lookup_entry_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.LookupEntryRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, datacatalog.Entry)
@@ -2826,6 +2945,11 @@ async def test_lookup_entry_async(transport: str = "grpc_asyncio"):
assert response.description == "description_value"
+@pytest.mark.asyncio
+async def test_lookup_entry_async_from_dict():
+ await test_lookup_entry_async(request_type=dict)
+
+
def test_list_entries(
transport: str = "grpc", request_type=datacatalog.ListEntriesRequest
):
@@ -2838,7 +2962,7 @@ def test_list_entries(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_entries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListEntriesResponse(
next_page_token="next_page_token_value",
@@ -2853,6 +2977,7 @@ def test_list_entries(
assert args[0] == datacatalog.ListEntriesRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListEntriesPager)
assert response.next_page_token == "next_page_token_value"
@@ -2863,19 +2988,19 @@ def test_list_entries_from_dict():
@pytest.mark.asyncio
-async def test_list_entries_async(transport: str = "grpc_asyncio"):
+async def test_list_entries_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.ListEntriesRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.ListEntriesRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_entries), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.ListEntriesResponse(next_page_token="next_page_token_value",)
@@ -2887,7 +3012,7 @@ async def test_list_entries_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.ListEntriesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListEntriesAsyncPager)
@@ -2895,6 +3020,11 @@ async def test_list_entries_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_entries_async_from_dict():
+ await test_list_entries_async(request_type=dict)
+
+
def test_list_entries_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -2904,7 +3034,7 @@ def test_list_entries_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_entries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
call.return_value = datacatalog.ListEntriesResponse()
client.list_entries(request)
@@ -2929,9 +3059,7 @@ async def test_list_entries_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_entries), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.ListEntriesResponse()
)
@@ -2952,7 +3080,7 @@ def test_list_entries_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_entries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListEntriesResponse()
@@ -2984,9 +3112,7 @@ async def test_list_entries_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_entries), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListEntriesResponse()
@@ -3021,7 +3147,7 @@ def test_list_entries_pager():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_entries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
datacatalog.ListEntriesResponse(
@@ -3059,7 +3185,7 @@ def test_list_entries_pages():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_entries), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_entries), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
datacatalog.ListEntriesResponse(
@@ -3080,8 +3206,8 @@ def test_list_entries_pages():
RuntimeError,
)
pages = list(client.list_entries(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -3090,9 +3216,7 @@ async def test_list_entries_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entries),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_entries), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -3129,9 +3253,7 @@ async def test_list_entries_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_entries),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_entries), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -3153,10 +3275,10 @@ async def test_list_entries_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.list_entries(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.list_entries(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
def test_create_tag_template(
@@ -3172,7 +3294,7 @@ def test_create_tag_template(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_tag_template), "__call__"
+ type(client.transport.create_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate(
@@ -3188,6 +3310,7 @@ def test_create_tag_template(
assert args[0] == datacatalog.CreateTagTemplateRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.TagTemplate)
assert response.name == "name_value"
@@ -3200,18 +3323,20 @@ def test_create_tag_template_from_dict():
@pytest.mark.asyncio
-async def test_create_tag_template_async(transport: str = "grpc_asyncio"):
+async def test_create_tag_template_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagTemplateRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.CreateTagTemplateRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_tag_template), "__call__"
+ type(client.transport.create_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -3224,7 +3349,7 @@ async def test_create_tag_template_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.CreateTagTemplateRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.TagTemplate)
@@ -3234,6 +3359,11 @@ async def test_create_tag_template_async(transport: str = "grpc_asyncio"):
assert response.display_name == "display_name_value"
+@pytest.mark.asyncio
+async def test_create_tag_template_async_from_dict():
+ await test_create_tag_template_async(request_type=dict)
+
+
def test_create_tag_template_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -3244,7 +3374,7 @@ def test_create_tag_template_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_tag_template), "__call__"
+ type(client.transport.create_tag_template), "__call__"
) as call:
call.return_value = tags.TagTemplate()
@@ -3271,7 +3401,7 @@ async def test_create_tag_template_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_tag_template), "__call__"
+ type(client.transport.create_tag_template), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate())
@@ -3292,7 +3422,7 @@ def test_create_tag_template_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_tag_template), "__call__"
+ type(client.transport.create_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate()
@@ -3337,7 +3467,7 @@ async def test_create_tag_template_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_tag_template), "__call__"
+ type(client.transport.create_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate()
@@ -3390,9 +3520,7 @@ def test_get_tag_template(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_tag_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate(
name="name_value", display_name="display_name_value",
@@ -3407,6 +3535,7 @@ def test_get_tag_template(
assert args[0] == datacatalog.GetTagTemplateRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.TagTemplate)
assert response.name == "name_value"
@@ -3419,19 +3548,19 @@ def test_get_tag_template_from_dict():
@pytest.mark.asyncio
-async def test_get_tag_template_async(transport: str = "grpc_asyncio"):
+async def test_get_tag_template_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.GetTagTemplateRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.GetTagTemplateRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_tag_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
tags.TagTemplate(name="name_value", display_name="display_name_value",)
@@ -3443,7 +3572,7 @@ async def test_get_tag_template_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.GetTagTemplateRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.TagTemplate)
@@ -3453,6 +3582,11 @@ async def test_get_tag_template_async(transport: str = "grpc_asyncio"):
assert response.display_name == "display_name_value"
+@pytest.mark.asyncio
+async def test_get_tag_template_async_from_dict():
+ await test_get_tag_template_async(request_type=dict)
+
+
def test_get_tag_template_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -3462,9 +3596,7 @@ def test_get_tag_template_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_tag_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call:
call.return_value = tags.TagTemplate()
client.get_tag_template(request)
@@ -3489,9 +3621,7 @@ async def test_get_tag_template_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_tag_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate())
await client.get_tag_template(request)
@@ -3510,9 +3640,7 @@ def test_get_tag_template_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_tag_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate()
@@ -3544,9 +3672,7 @@ async def test_get_tag_template_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_tag_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_tag_template), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate()
@@ -3588,7 +3714,7 @@ def test_update_tag_template(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_tag_template), "__call__"
+ type(client.transport.update_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate(
@@ -3604,6 +3730,7 @@ def test_update_tag_template(
assert args[0] == datacatalog.UpdateTagTemplateRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.TagTemplate)
assert response.name == "name_value"
@@ -3616,18 +3743,20 @@ def test_update_tag_template_from_dict():
@pytest.mark.asyncio
-async def test_update_tag_template_async(transport: str = "grpc_asyncio"):
+async def test_update_tag_template_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagTemplateRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.UpdateTagTemplateRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_tag_template), "__call__"
+ type(client.transport.update_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -3640,7 +3769,7 @@ async def test_update_tag_template_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.UpdateTagTemplateRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.TagTemplate)
@@ -3650,6 +3779,11 @@ async def test_update_tag_template_async(transport: str = "grpc_asyncio"):
assert response.display_name == "display_name_value"
+@pytest.mark.asyncio
+async def test_update_tag_template_async_from_dict():
+ await test_update_tag_template_async(request_type=dict)
+
+
def test_update_tag_template_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -3660,7 +3794,7 @@ def test_update_tag_template_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_tag_template), "__call__"
+ type(client.transport.update_tag_template), "__call__"
) as call:
call.return_value = tags.TagTemplate()
@@ -3690,7 +3824,7 @@ async def test_update_tag_template_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_tag_template), "__call__"
+ type(client.transport.update_tag_template), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.TagTemplate())
@@ -3714,7 +3848,7 @@ def test_update_tag_template_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_tag_template), "__call__"
+ type(client.transport.update_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate()
@@ -3755,7 +3889,7 @@ async def test_update_tag_template_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_tag_template), "__call__"
+ type(client.transport.update_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplate()
@@ -3805,7 +3939,7 @@ def test_delete_tag_template(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_tag_template), "__call__"
+ type(client.transport.delete_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -3827,18 +3961,20 @@ def test_delete_tag_template_from_dict():
@pytest.mark.asyncio
-async def test_delete_tag_template_async(transport: str = "grpc_asyncio"):
+async def test_delete_tag_template_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagTemplateRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.DeleteTagTemplateRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_tag_template), "__call__"
+ type(client.transport.delete_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -3849,12 +3985,17 @@ async def test_delete_tag_template_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.DeleteTagTemplateRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_tag_template_async_from_dict():
+ await test_delete_tag_template_async(request_type=dict)
+
+
def test_delete_tag_template_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -3865,7 +4006,7 @@ def test_delete_tag_template_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_tag_template), "__call__"
+ type(client.transport.delete_tag_template), "__call__"
) as call:
call.return_value = None
@@ -3892,7 +4033,7 @@ async def test_delete_tag_template_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_tag_template), "__call__"
+ type(client.transport.delete_tag_template), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -3913,7 +4054,7 @@ def test_delete_tag_template_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_tag_template), "__call__"
+ type(client.transport.delete_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -3951,7 +4092,7 @@ async def test_delete_tag_template_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_tag_template), "__call__"
+ type(client.transport.delete_tag_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -3996,7 +4137,7 @@ def test_create_tag_template_field(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_tag_template_field), "__call__"
+ type(client.transport.create_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField(
@@ -4015,6 +4156,7 @@ def test_create_tag_template_field(
assert args[0] == datacatalog.CreateTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.TagTemplateField)
assert response.name == "name_value"
@@ -4031,18 +4173,21 @@ def test_create_tag_template_field_from_dict():
@pytest.mark.asyncio
-async def test_create_tag_template_field_async(transport: str = "grpc_asyncio"):
+async def test_create_tag_template_field_async(
+ transport: str = "grpc_asyncio",
+ request_type=datacatalog.CreateTagTemplateFieldRequest,
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.CreateTagTemplateFieldRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_tag_template_field), "__call__"
+ type(client.transport.create_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -4060,7 +4205,7 @@ async def test_create_tag_template_field_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.CreateTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.TagTemplateField)
@@ -4074,6 +4219,11 @@ async def test_create_tag_template_field_async(transport: str = "grpc_asyncio"):
assert response.order == 540
+@pytest.mark.asyncio
+async def test_create_tag_template_field_async_from_dict():
+ await test_create_tag_template_field_async(request_type=dict)
+
+
def test_create_tag_template_field_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -4084,7 +4234,7 @@ def test_create_tag_template_field_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_tag_template_field), "__call__"
+ type(client.transport.create_tag_template_field), "__call__"
) as call:
call.return_value = tags.TagTemplateField()
@@ -4111,7 +4261,7 @@ async def test_create_tag_template_field_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_tag_template_field), "__call__"
+ type(client.transport.create_tag_template_field), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
tags.TagTemplateField()
@@ -4134,7 +4284,7 @@ def test_create_tag_template_field_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_tag_template_field), "__call__"
+ type(client.transport.create_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField()
@@ -4179,7 +4329,7 @@ async def test_create_tag_template_field_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_tag_template_field), "__call__"
+ type(client.transport.create_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField()
@@ -4235,7 +4385,7 @@ def test_update_tag_template_field(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_tag_template_field), "__call__"
+ type(client.transport.update_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField(
@@ -4254,6 +4404,7 @@ def test_update_tag_template_field(
assert args[0] == datacatalog.UpdateTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.TagTemplateField)
assert response.name == "name_value"
@@ -4270,18 +4421,21 @@ def test_update_tag_template_field_from_dict():
@pytest.mark.asyncio
-async def test_update_tag_template_field_async(transport: str = "grpc_asyncio"):
+async def test_update_tag_template_field_async(
+ transport: str = "grpc_asyncio",
+ request_type=datacatalog.UpdateTagTemplateFieldRequest,
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.UpdateTagTemplateFieldRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_tag_template_field), "__call__"
+ type(client.transport.update_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -4299,7 +4453,7 @@ async def test_update_tag_template_field_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.UpdateTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.TagTemplateField)
@@ -4313,6 +4467,11 @@ async def test_update_tag_template_field_async(transport: str = "grpc_asyncio"):
assert response.order == 540
+@pytest.mark.asyncio
+async def test_update_tag_template_field_async_from_dict():
+ await test_update_tag_template_field_async(request_type=dict)
+
+
def test_update_tag_template_field_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -4323,7 +4482,7 @@ def test_update_tag_template_field_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_tag_template_field), "__call__"
+ type(client.transport.update_tag_template_field), "__call__"
) as call:
call.return_value = tags.TagTemplateField()
@@ -4350,7 +4509,7 @@ async def test_update_tag_template_field_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_tag_template_field), "__call__"
+ type(client.transport.update_tag_template_field), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
tags.TagTemplateField()
@@ -4373,7 +4532,7 @@ def test_update_tag_template_field_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_tag_template_field), "__call__"
+ type(client.transport.update_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField()
@@ -4418,7 +4577,7 @@ async def test_update_tag_template_field_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_tag_template_field), "__call__"
+ type(client.transport.update_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField()
@@ -4474,7 +4633,7 @@ def test_rename_tag_template_field(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.rename_tag_template_field), "__call__"
+ type(client.transport.rename_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField(
@@ -4493,6 +4652,7 @@ def test_rename_tag_template_field(
assert args[0] == datacatalog.RenameTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.TagTemplateField)
assert response.name == "name_value"
@@ -4509,18 +4669,21 @@ def test_rename_tag_template_field_from_dict():
@pytest.mark.asyncio
-async def test_rename_tag_template_field_async(transport: str = "grpc_asyncio"):
+async def test_rename_tag_template_field_async(
+ transport: str = "grpc_asyncio",
+ request_type=datacatalog.RenameTagTemplateFieldRequest,
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.RenameTagTemplateFieldRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.rename_tag_template_field), "__call__"
+ type(client.transport.rename_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -4538,7 +4701,7 @@ async def test_rename_tag_template_field_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.RenameTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.TagTemplateField)
@@ -4552,6 +4715,11 @@ async def test_rename_tag_template_field_async(transport: str = "grpc_asyncio"):
assert response.order == 540
+@pytest.mark.asyncio
+async def test_rename_tag_template_field_async_from_dict():
+ await test_rename_tag_template_field_async(request_type=dict)
+
+
def test_rename_tag_template_field_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -4562,7 +4730,7 @@ def test_rename_tag_template_field_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.rename_tag_template_field), "__call__"
+ type(client.transport.rename_tag_template_field), "__call__"
) as call:
call.return_value = tags.TagTemplateField()
@@ -4589,7 +4757,7 @@ async def test_rename_tag_template_field_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.rename_tag_template_field), "__call__"
+ type(client.transport.rename_tag_template_field), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
tags.TagTemplateField()
@@ -4612,7 +4780,7 @@ def test_rename_tag_template_field_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.rename_tag_template_field), "__call__"
+ type(client.transport.rename_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField()
@@ -4653,7 +4821,7 @@ async def test_rename_tag_template_field_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.rename_tag_template_field), "__call__"
+ type(client.transport.rename_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = tags.TagTemplateField()
@@ -4705,7 +4873,7 @@ def test_delete_tag_template_field(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_tag_template_field), "__call__"
+ type(client.transport.delete_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -4727,18 +4895,21 @@ def test_delete_tag_template_field_from_dict():
@pytest.mark.asyncio
-async def test_delete_tag_template_field_async(transport: str = "grpc_asyncio"):
+async def test_delete_tag_template_field_async(
+ transport: str = "grpc_asyncio",
+ request_type=datacatalog.DeleteTagTemplateFieldRequest,
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.DeleteTagTemplateFieldRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_tag_template_field), "__call__"
+ type(client.transport.delete_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -4749,12 +4920,17 @@ async def test_delete_tag_template_field_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.DeleteTagTemplateFieldRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_tag_template_field_async_from_dict():
+ await test_delete_tag_template_field_async(request_type=dict)
+
+
def test_delete_tag_template_field_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -4765,7 +4941,7 @@ def test_delete_tag_template_field_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_tag_template_field), "__call__"
+ type(client.transport.delete_tag_template_field), "__call__"
) as call:
call.return_value = None
@@ -4792,7 +4968,7 @@ async def test_delete_tag_template_field_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_tag_template_field), "__call__"
+ type(client.transport.delete_tag_template_field), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -4813,7 +4989,7 @@ def test_delete_tag_template_field_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_tag_template_field), "__call__"
+ type(client.transport.delete_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -4851,7 +5027,7 @@ async def test_delete_tag_template_field_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_tag_template_field), "__call__"
+ type(client.transport.delete_tag_template_field), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -4895,7 +5071,7 @@ def test_create_tag(transport: str = "grpc", request_type=datacatalog.CreateTagR
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.Tag(
name="name_value",
@@ -4913,6 +5089,7 @@ def test_create_tag(transport: str = "grpc", request_type=datacatalog.CreateTagR
assert args[0] == datacatalog.CreateTagRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.Tag)
assert response.name == "name_value"
@@ -4927,19 +5104,19 @@ def test_create_tag_from_dict():
@pytest.mark.asyncio
-async def test_create_tag_async(transport: str = "grpc_asyncio"):
+async def test_create_tag_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.CreateTagRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.CreateTagRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
tags.Tag(
@@ -4955,7 +5132,7 @@ async def test_create_tag_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.CreateTagRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.Tag)
@@ -4967,6 +5144,11 @@ async def test_create_tag_async(transport: str = "grpc_asyncio"):
assert response.template_display_name == "template_display_name_value"
+@pytest.mark.asyncio
+async def test_create_tag_async_from_dict():
+ await test_create_tag_async(request_type=dict)
+
+
def test_create_tag_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -4976,7 +5158,7 @@ def test_create_tag_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_tag), "__call__") as call:
call.return_value = tags.Tag()
client.create_tag(request)
@@ -5001,9 +5183,7 @@ async def test_create_tag_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_tag), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag())
await client.create_tag(request)
@@ -5022,7 +5202,7 @@ def test_create_tag_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.Tag()
@@ -5060,9 +5240,7 @@ async def test_create_tag_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.Tag()
@@ -5107,7 +5285,7 @@ def test_update_tag(transport: str = "grpc", request_type=datacatalog.UpdateTagR
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.Tag(
name="name_value",
@@ -5125,6 +5303,7 @@ def test_update_tag(transport: str = "grpc", request_type=datacatalog.UpdateTagR
assert args[0] == datacatalog.UpdateTagRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, tags.Tag)
assert response.name == "name_value"
@@ -5139,19 +5318,19 @@ def test_update_tag_from_dict():
@pytest.mark.asyncio
-async def test_update_tag_async(transport: str = "grpc_asyncio"):
+async def test_update_tag_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.UpdateTagRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.UpdateTagRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
tags.Tag(
@@ -5167,7 +5346,7 @@ async def test_update_tag_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.UpdateTagRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, tags.Tag)
@@ -5179,6 +5358,11 @@ async def test_update_tag_async(transport: str = "grpc_asyncio"):
assert response.template_display_name == "template_display_name_value"
+@pytest.mark.asyncio
+async def test_update_tag_async_from_dict():
+ await test_update_tag_async(request_type=dict)
+
+
def test_update_tag_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -5188,7 +5372,7 @@ def test_update_tag_field_headers():
request.tag.name = "tag.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_tag), "__call__") as call:
call.return_value = tags.Tag()
client.update_tag(request)
@@ -5213,9 +5397,7 @@ async def test_update_tag_field_headers_async():
request.tag.name = "tag.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_tag), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(tags.Tag())
await client.update_tag(request)
@@ -5234,7 +5416,7 @@ def test_update_tag_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.Tag()
@@ -5273,9 +5455,7 @@ async def test_update_tag_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = tags.Tag()
@@ -5321,7 +5501,7 @@ def test_delete_tag(transport: str = "grpc", request_type=datacatalog.DeleteTagR
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -5342,19 +5522,19 @@ def test_delete_tag_from_dict():
@pytest.mark.asyncio
-async def test_delete_tag_async(transport: str = "grpc_asyncio"):
+async def test_delete_tag_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.DeleteTagRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.DeleteTagRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -5364,12 +5544,17 @@ async def test_delete_tag_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.DeleteTagRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_tag_async_from_dict():
+ await test_delete_tag_async(request_type=dict)
+
+
def test_delete_tag_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -5379,7 +5564,7 @@ def test_delete_tag_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_tag), "__call__") as call:
call.return_value = None
client.delete_tag(request)
@@ -5404,9 +5589,7 @@ async def test_delete_tag_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_tag), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_tag(request)
@@ -5425,7 +5608,7 @@ def test_delete_tag_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -5457,9 +5640,7 @@ async def test_delete_tag_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -5498,7 +5679,7 @@ def test_list_tags(transport: str = "grpc", request_type=datacatalog.ListTagsReq
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListTagsResponse(
next_page_token="next_page_token_value",
@@ -5513,6 +5694,7 @@ def test_list_tags(transport: str = "grpc", request_type=datacatalog.ListTagsReq
assert args[0] == datacatalog.ListTagsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListTagsPager)
assert response.next_page_token == "next_page_token_value"
@@ -5523,19 +5705,19 @@ def test_list_tags_from_dict():
@pytest.mark.asyncio
-async def test_list_tags_async(transport: str = "grpc_asyncio"):
+async def test_list_tags_async(
+ transport: str = "grpc_asyncio", request_type=datacatalog.ListTagsRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = datacatalog.ListTagsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.ListTagsResponse(next_page_token="next_page_token_value",)
@@ -5547,7 +5729,7 @@ async def test_list_tags_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == datacatalog.ListTagsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListTagsAsyncPager)
@@ -5555,6 +5737,11 @@ async def test_list_tags_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_tags_async_from_dict():
+ await test_list_tags_async(request_type=dict)
+
+
def test_list_tags_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -5564,7 +5751,7 @@ def test_list_tags_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
call.return_value = datacatalog.ListTagsResponse()
client.list_tags(request)
@@ -5589,9 +5776,7 @@ async def test_list_tags_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
datacatalog.ListTagsResponse()
)
@@ -5612,7 +5797,7 @@ def test_list_tags_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListTagsResponse()
@@ -5644,9 +5829,7 @@ async def test_list_tags_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = datacatalog.ListTagsResponse()
@@ -5681,7 +5864,7 @@ def test_list_tags_pager():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
datacatalog.ListTagsResponse(
@@ -5710,7 +5893,7 @@ def test_list_tags_pages():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_tags), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_tags), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
datacatalog.ListTagsResponse(
@@ -5722,8 +5905,8 @@ def test_list_tags_pages():
RuntimeError,
)
pages = list(client.list_tags(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -5732,9 +5915,7 @@ async def test_list_tags_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_tags),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -5762,9 +5943,7 @@ async def test_list_tags_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_tags),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_tags), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -5777,10 +5956,10 @@ async def test_list_tags_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.list_tags(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.list_tags(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
def test_set_iam_policy(
@@ -5795,7 +5974,7 @@ def test_set_iam_policy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
@@ -5808,6 +5987,7 @@ def test_set_iam_policy(
assert args[0] == iam_policy.SetIamPolicyRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policy.Policy)
assert response.version == 774
@@ -5820,19 +6000,19 @@ def test_set_iam_policy_from_dict():
@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+async def test_set_iam_policy_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.SetIamPolicyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy.Policy(version=774, etag=b"etag_blob",)
@@ -5844,7 +6024,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.SetIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
@@ -5854,6 +6034,11 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_set_iam_policy_async_from_dict():
+ await test_set_iam_policy_async(request_type=dict)
+
+
def test_set_iam_policy_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -5863,7 +6048,7 @@ def test_set_iam_policy_field_headers():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
call.return_value = policy.Policy()
client.set_iam_policy(request)
@@ -5888,9 +6073,7 @@ async def test_set_iam_policy_field_headers_async():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
await client.set_iam_policy(request)
@@ -5905,10 +6088,10 @@ async def test_set_iam_policy_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_set_iam_policy_from_dict():
+def test_set_iam_policy_from_dict_foreign():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -5925,7 +6108,7 @@ def test_set_iam_policy_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -5957,9 +6140,7 @@ async def test_set_iam_policy_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -6000,7 +6181,7 @@ def test_get_iam_policy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
@@ -6013,6 +6194,7 @@ def test_get_iam_policy(
assert args[0] == iam_policy.GetIamPolicyRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policy.Policy)
assert response.version == 774
@@ -6025,19 +6207,19 @@ def test_get_iam_policy_from_dict():
@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
+async def test_get_iam_policy_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.GetIamPolicyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy.Policy(version=774, etag=b"etag_blob",)
@@ -6049,7 +6231,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.GetIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
@@ -6059,6 +6241,11 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_from_dict():
+ await test_get_iam_policy_async(request_type=dict)
+
+
def test_get_iam_policy_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -6068,7 +6255,7 @@ def test_get_iam_policy_field_headers():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
call.return_value = policy.Policy()
client.get_iam_policy(request)
@@ -6093,9 +6280,7 @@ async def test_get_iam_policy_field_headers_async():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
await client.get_iam_policy(request)
@@ -6110,10 +6295,10 @@ async def test_get_iam_policy_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_get_iam_policy_from_dict():
+def test_get_iam_policy_from_dict_foreign():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -6130,7 +6315,7 @@ def test_get_iam_policy_flattened():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -6162,9 +6347,7 @@ async def test_get_iam_policy_flattened_async():
client = DataCatalogAsyncClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -6206,7 +6389,7 @@ def test_test_iam_permissions(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse(
@@ -6222,6 +6405,7 @@ def test_test_iam_permissions(
assert args[0] == iam_policy.TestIamPermissionsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, iam_policy.TestIamPermissionsResponse)
assert response.permissions == ["permissions_value"]
@@ -6232,18 +6416,20 @@ def test_test_iam_permissions_from_dict():
@pytest.mark.asyncio
-async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
+async def test_test_iam_permissions_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest
+):
client = DataCatalogAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.TestIamPermissionsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -6256,7 +6442,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.TestIamPermissionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, iam_policy.TestIamPermissionsResponse)
@@ -6264,6 +6450,11 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
assert response.permissions == ["permissions_value"]
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_from_dict():
+ await test_test_iam_permissions_async(request_type=dict)
+
+
def test_test_iam_permissions_field_headers():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
@@ -6274,7 +6465,7 @@ def test_test_iam_permissions_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -6301,7 +6492,7 @@ async def test_test_iam_permissions_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
iam_policy.TestIamPermissionsResponse()
@@ -6319,11 +6510,11 @@ async def test_test_iam_permissions_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_test_iam_permissions_from_dict():
+def test_test_iam_permissions_from_dict_foreign():
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -6373,7 +6564,7 @@ def test_transport_instance():
credentials=credentials.AnonymousCredentials(),
)
client = DataCatalogClient(transport=transport)
- assert client._transport is transport
+ assert client.transport is transport
def test_transport_get_channel():
@@ -6391,10 +6582,22 @@ def test_transport_get_channel():
assert channel
+@pytest.mark.parametrize(
+ "transport_class",
+ [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = DataCatalogClient(credentials=credentials.AnonymousCredentials(),)
- assert isinstance(client._transport, transports.DataCatalogGrpcTransport,)
+ assert isinstance(client.transport, transports.DataCatalogGrpcTransport,)
def test_data_catalog_base_transport_error():
@@ -6471,6 +6674,17 @@ def test_data_catalog_base_transport_with_credentials_file():
)
+def test_data_catalog_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.datacatalog_v1beta1.services.data_catalog.transports.DataCatalogTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.DataCatalogTransport()
+ adc.assert_called_once()
+
+
def test_data_catalog_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(auth, "default") as adc:
@@ -6503,7 +6717,7 @@ def test_data_catalog_host_no_port():
api_endpoint="datacatalog.googleapis.com"
),
)
- assert client._transport._host == "datacatalog.googleapis.com:443"
+ assert client.transport._host == "datacatalog.googleapis.com:443"
def test_data_catalog_host_with_port():
@@ -6513,210 +6727,111 @@ def test_data_catalog_host_with_port():
api_endpoint="datacatalog.googleapis.com:8000"
),
)
- assert client._transport._host == "datacatalog.googleapis.com:8000"
+ assert client.transport._host == "datacatalog.googleapis.com:8000"
def test_data_catalog_grpc_transport_channel():
channel = grpc.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.DataCatalogGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
+ assert transport._ssl_channel_credentials is None
def test_data_catalog_grpc_asyncio_transport_channel():
channel = aio.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.DataCatalogGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_data_catalog_grpc_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
-
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- transport = transports.DataCatalogGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_data_catalog_grpc_asyncio_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
-
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- transport = transports.DataCatalogGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials is None
@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+ "transport_class",
+ [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport],
)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_data_catalog_grpc_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
-):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- # Mock google.auth.transport.grpc.SslCredentials class.
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- mock_cred = mock.Mock()
- transport = transports.DataCatalogGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+def test_data_catalog_transport_channel_mtls_with_client_cert_source(transport_class):
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+ "transport_class",
+ [transports.DataCatalogGrpcTransport, transports.DataCatalogGrpcAsyncIOTransport],
)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_data_catalog_grpc_asyncio_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
-):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- # Mock google.auth.transport.grpc.SslCredentials class.
+def test_data_catalog_transport_channel_mtls_with_adc(transport_class):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
- mock_cred = mock.Mock()
- transport = transports.DataCatalogGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
-
-
-def test_tag_template_path():
- project = "squid"
- location = "clam"
- tag_template = "whelk"
-
- expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format(
- project=project, location=location, tag_template=tag_template,
- )
- actual = DataCatalogClient.tag_template_path(project, location, tag_template)
- assert expected == actual
-
-
-def test_parse_tag_template_path():
- expected = {
- "project": "octopus",
- "location": "oyster",
- "tag_template": "nudibranch",
- }
- path = DataCatalogClient.tag_template_path(**expected)
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
- # Check that the path construction is reversible.
- actual = DataCatalogClient.parse_tag_template_path(path)
- assert expected == actual
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
def test_entry_path():
@@ -6747,9 +6862,9 @@ def test_parse_entry_path():
def test_entry_group_path():
- project = "squid"
- location = "clam"
- entry_group = "whelk"
+ project = "winkle"
+ location = "nautilus"
+ entry_group = "scallop"
expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}".format(
project=project, location=location, entry_group=entry_group,
@@ -6760,9 +6875,9 @@ def test_entry_group_path():
def test_parse_entry_group_path():
expected = {
- "project": "octopus",
- "location": "oyster",
- "entry_group": "nudibranch",
+ "project": "abalone",
+ "location": "squid",
+ "entry_group": "clam",
}
path = DataCatalogClient.entry_group_path(**expected)
@@ -6771,11 +6886,69 @@ def test_parse_entry_group_path():
assert expected == actual
-def test_tag_template_field_path():
+def test_tag_path():
+ project = "whelk"
+ location = "octopus"
+ entry_group = "oyster"
+ entry = "nudibranch"
+ tag = "cuttlefish"
+
+ expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format(
+ project=project,
+ location=location,
+ entry_group=entry_group,
+ entry=entry,
+ tag=tag,
+ )
+ actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag)
+ assert expected == actual
+
+
+def test_parse_tag_path():
+ expected = {
+ "project": "mussel",
+ "location": "winkle",
+ "entry_group": "nautilus",
+ "entry": "scallop",
+ "tag": "abalone",
+ }
+ path = DataCatalogClient.tag_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataCatalogClient.parse_tag_path(path)
+ assert expected == actual
+
+
+def test_tag_template_path():
project = "squid"
location = "clam"
tag_template = "whelk"
- field = "octopus"
+
+ expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}".format(
+ project=project, location=location, tag_template=tag_template,
+ )
+ actual = DataCatalogClient.tag_template_path(project, location, tag_template)
+ assert expected == actual
+
+
+def test_parse_tag_template_path():
+ expected = {
+ "project": "octopus",
+ "location": "oyster",
+ "tag_template": "nudibranch",
+ }
+ path = DataCatalogClient.tag_template_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataCatalogClient.parse_tag_template_path(path)
+ assert expected == actual
+
+
+def test_tag_template_field_path():
+ project = "cuttlefish"
+ location = "mussel"
+ tag_template = "winkle"
+ field = "nautilus"
expected = "projects/{project}/locations/{location}/tagTemplates/{tag_template}/fields/{field}".format(
project=project, location=location, tag_template=tag_template, field=field,
@@ -6788,10 +6961,10 @@ def test_tag_template_field_path():
def test_parse_tag_template_field_path():
expected = {
- "project": "oyster",
- "location": "nudibranch",
- "tag_template": "cuttlefish",
- "field": "mussel",
+ "project": "scallop",
+ "location": "abalone",
+ "tag_template": "squid",
+ "field": "clam",
}
path = DataCatalogClient.tag_template_field_path(**expected)
@@ -6800,34 +6973,123 @@ def test_parse_tag_template_field_path():
assert expected == actual
-def test_tag_path():
- project = "squid"
- location = "clam"
- entry_group = "whelk"
- entry = "octopus"
- tag = "oyster"
+def test_common_billing_account_path():
+ billing_account = "whelk"
- expected = "projects/{project}/locations/{location}/entryGroups/{entry_group}/entries/{entry}/tags/{tag}".format(
- project=project,
- location=location,
- entry_group=entry_group,
- entry=entry,
- tag=tag,
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
)
- actual = DataCatalogClient.tag_path(project, location, entry_group, entry, tag)
+ actual = DataCatalogClient.common_billing_account_path(billing_account)
assert expected == actual
-def test_parse_tag_path():
+def test_parse_common_billing_account_path():
expected = {
- "project": "nudibranch",
- "location": "cuttlefish",
- "entry_group": "mussel",
- "entry": "winkle",
- "tag": "nautilus",
+ "billing_account": "octopus",
}
- path = DataCatalogClient.tag_path(**expected)
+ path = DataCatalogClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
- actual = DataCatalogClient.parse_tag_path(path)
+ actual = DataCatalogClient.parse_common_billing_account_path(path)
assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "oyster"
+
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = DataCatalogClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "nudibranch",
+ }
+ path = DataCatalogClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataCatalogClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "cuttlefish"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = DataCatalogClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "mussel",
+ }
+ path = DataCatalogClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataCatalogClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "winkle"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = DataCatalogClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "nautilus",
+ }
+ path = DataCatalogClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataCatalogClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "scallop"
+ location = "abalone"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+ actual = DataCatalogClient.common_location_path(project, location)
+ assert expected == actual
+
+
+def test_parse_common_location_path():
+ expected = {
+ "project": "squid",
+ "location": "clam",
+ }
+ path = DataCatalogClient.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = DataCatalogClient.parse_common_location_path(path)
+ assert expected == actual
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.DataCatalogTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = DataCatalogClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.DataCatalogTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = DataCatalogClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py
index de5f0342..fc201fe0 100644
--- a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py
+++ b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager.py
@@ -104,12 +104,12 @@ def test_policy_tag_manager_client_from_service_account_file(client_class):
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
client = client_class.from_service_account_json("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
- assert client._transport._host == "datacatalog.googleapis.com:443"
+ assert client.transport._host == "datacatalog.googleapis.com:443"
def test_policy_tag_manager_client_get_transport_class():
@@ -165,14 +165,14 @@ def test_policy_tag_manager_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -181,14 +181,14 @@ def test_policy_tag_manager_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -197,90 +197,185 @@ def test_policy_tag_manager_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (
+ PolicyTagManagerClient,
+ transports.PolicyTagManagerGrpcTransport,
+ "grpc",
+ "true",
+ ),
+ (
+ PolicyTagManagerAsyncClient,
+ transports.PolicyTagManagerGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (
+ PolicyTagManagerClient,
+ transports.PolicyTagManagerGrpcTransport,
+ "grpc",
+ "false",
+ ),
+ (
+ PolicyTagManagerAsyncClient,
+ transports.PolicyTagManagerGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ PolicyTagManagerClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(PolicyTagManagerClient),
+)
+@mock.patch.object(
+ PolicyTagManagerAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(PolicyTagManagerAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_policy_tag_manager_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=client_cert_source_callback,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and default_client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
- with mock.patch.object(transport_class, "__init__") as patched:
+ ssl_channel_creds = mock.Mock()
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=True,
+ "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
):
patched.return_value = None
- client = client_class()
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_ssl_channel_creds = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_ssl_channel_creds = ssl_channel_creds
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
+ host=expected_host,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", but client_cert_source and default_client_cert_source are None.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=False,
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
):
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has
- # unsupported value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError):
- client = client_class()
-
- # Check the case quota_project_id is provided
- options = client_options.ClientOptions(quota_project_id="octopus")
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id="octopus",
- )
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.ssl_credentials",
+ new_callable=mock.PropertyMock,
+ ) as ssl_credentials_mock:
+ if use_client_cert_env == "false":
+ is_mtls_mock.return_value = False
+ ssl_credentials_mock.return_value = None
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_ssl_channel_creds = None
+ else:
+ is_mtls_mock.return_value = True
+ ssl_credentials_mock.return_value = mock.Mock()
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_ssl_channel_creds = (
+ ssl_credentials_mock.return_value
+ )
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ ):
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ is_mtls_mock.return_value = False
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
@pytest.mark.parametrize(
@@ -307,9 +402,9 @@ def test_policy_tag_manager_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -337,9 +432,9 @@ def test_policy_tag_manager_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -356,9 +451,9 @@ def test_policy_tag_manager_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -374,7 +469,7 @@ def test_create_taxonomy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_taxonomy), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.Taxonomy(
name="name_value",
@@ -394,6 +489,7 @@ def test_create_taxonomy(
assert args[0] == policytagmanager.CreateTaxonomyRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policytagmanager.Taxonomy)
assert response.name == "name_value"
@@ -412,19 +508,19 @@ def test_create_taxonomy_from_dict():
@pytest.mark.asyncio
-async def test_create_taxonomy_async(transport: str = "grpc_asyncio"):
+async def test_create_taxonomy_async(
+ transport: str = "grpc_asyncio", request_type=policytagmanager.CreateTaxonomyRequest
+):
client = PolicyTagManagerAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = policytagmanager.CreateTaxonomyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_taxonomy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.Taxonomy(
@@ -443,7 +539,7 @@ async def test_create_taxonomy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == policytagmanager.CreateTaxonomyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policytagmanager.Taxonomy)
@@ -459,6 +555,11 @@ async def test_create_taxonomy_async(transport: str = "grpc_asyncio"):
]
+@pytest.mark.asyncio
+async def test_create_taxonomy_async_from_dict():
+ await test_create_taxonomy_async(request_type=dict)
+
+
def test_create_taxonomy_field_headers():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
@@ -468,7 +569,7 @@ def test_create_taxonomy_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_taxonomy), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call:
call.return_value = policytagmanager.Taxonomy()
client.create_taxonomy(request)
@@ -495,9 +596,7 @@ async def test_create_taxonomy_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_taxonomy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.Taxonomy()
)
@@ -518,7 +617,7 @@ def test_create_taxonomy_flattened():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_taxonomy), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.Taxonomy()
@@ -559,9 +658,7 @@ async def test_create_taxonomy_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_taxonomy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.Taxonomy()
@@ -613,7 +710,7 @@ def test_delete_taxonomy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_taxonomy), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -634,19 +731,19 @@ def test_delete_taxonomy_from_dict():
@pytest.mark.asyncio
-async def test_delete_taxonomy_async(transport: str = "grpc_asyncio"):
+async def test_delete_taxonomy_async(
+ transport: str = "grpc_asyncio", request_type=policytagmanager.DeleteTaxonomyRequest
+):
client = PolicyTagManagerAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = policytagmanager.DeleteTaxonomyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_taxonomy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -656,12 +753,17 @@ async def test_delete_taxonomy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == policytagmanager.DeleteTaxonomyRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_taxonomy_async_from_dict():
+ await test_delete_taxonomy_async(request_type=dict)
+
+
def test_delete_taxonomy_field_headers():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
@@ -671,7 +773,7 @@ def test_delete_taxonomy_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_taxonomy), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call:
call.return_value = None
client.delete_taxonomy(request)
@@ -698,9 +800,7 @@ async def test_delete_taxonomy_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_taxonomy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_taxonomy(request)
@@ -719,7 +819,7 @@ def test_delete_taxonomy_flattened():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_taxonomy), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -753,9 +853,7 @@ async def test_delete_taxonomy_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_taxonomy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -798,7 +896,7 @@ def test_update_taxonomy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_taxonomy), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.Taxonomy(
name="name_value",
@@ -818,6 +916,7 @@ def test_update_taxonomy(
assert args[0] == policytagmanager.UpdateTaxonomyRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policytagmanager.Taxonomy)
assert response.name == "name_value"
@@ -836,19 +935,19 @@ def test_update_taxonomy_from_dict():
@pytest.mark.asyncio
-async def test_update_taxonomy_async(transport: str = "grpc_asyncio"):
+async def test_update_taxonomy_async(
+ transport: str = "grpc_asyncio", request_type=policytagmanager.UpdateTaxonomyRequest
+):
client = PolicyTagManagerAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = policytagmanager.UpdateTaxonomyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_taxonomy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.Taxonomy(
@@ -867,7 +966,7 @@ async def test_update_taxonomy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == policytagmanager.UpdateTaxonomyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policytagmanager.Taxonomy)
@@ -883,6 +982,11 @@ async def test_update_taxonomy_async(transport: str = "grpc_asyncio"):
]
+@pytest.mark.asyncio
+async def test_update_taxonomy_async_from_dict():
+ await test_update_taxonomy_async(request_type=dict)
+
+
def test_update_taxonomy_field_headers():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
@@ -892,7 +996,7 @@ def test_update_taxonomy_field_headers():
request.taxonomy.name = "taxonomy.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_taxonomy), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call:
call.return_value = policytagmanager.Taxonomy()
client.update_taxonomy(request)
@@ -921,9 +1025,7 @@ async def test_update_taxonomy_field_headers_async():
request.taxonomy.name = "taxonomy.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_taxonomy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.Taxonomy()
)
@@ -946,7 +1048,7 @@ def test_update_taxonomy_flattened():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.update_taxonomy), "__call__") as call:
+ with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.Taxonomy()
@@ -981,9 +1083,7 @@ async def test_update_taxonomy_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.update_taxonomy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.update_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.Taxonomy()
@@ -1031,7 +1131,7 @@ def test_list_taxonomies(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_taxonomies), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.ListTaxonomiesResponse(
next_page_token="next_page_token_value",
@@ -1046,6 +1146,7 @@ def test_list_taxonomies(
assert args[0] == policytagmanager.ListTaxonomiesRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListTaxonomiesPager)
assert response.next_page_token == "next_page_token_value"
@@ -1056,19 +1157,19 @@ def test_list_taxonomies_from_dict():
@pytest.mark.asyncio
-async def test_list_taxonomies_async(transport: str = "grpc_asyncio"):
+async def test_list_taxonomies_async(
+ transport: str = "grpc_asyncio", request_type=policytagmanager.ListTaxonomiesRequest
+):
client = PolicyTagManagerAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = policytagmanager.ListTaxonomiesRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_taxonomies), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.ListTaxonomiesResponse(
@@ -1082,7 +1183,7 @@ async def test_list_taxonomies_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == policytagmanager.ListTaxonomiesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListTaxonomiesAsyncPager)
@@ -1090,6 +1191,11 @@ async def test_list_taxonomies_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_taxonomies_async_from_dict():
+ await test_list_taxonomies_async(request_type=dict)
+
+
def test_list_taxonomies_field_headers():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
@@ -1099,7 +1205,7 @@ def test_list_taxonomies_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_taxonomies), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call:
call.return_value = policytagmanager.ListTaxonomiesResponse()
client.list_taxonomies(request)
@@ -1126,9 +1232,7 @@ async def test_list_taxonomies_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_taxonomies), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.ListTaxonomiesResponse()
)
@@ -1149,7 +1253,7 @@ def test_list_taxonomies_flattened():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_taxonomies), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.ListTaxonomiesResponse()
@@ -1183,9 +1287,7 @@ async def test_list_taxonomies_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_taxonomies), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.ListTaxonomiesResponse()
@@ -1222,7 +1324,7 @@ def test_list_taxonomies_pager():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_taxonomies), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
policytagmanager.ListTaxonomiesResponse(
@@ -1262,7 +1364,7 @@ def test_list_taxonomies_pages():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_taxonomies), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_taxonomies), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
policytagmanager.ListTaxonomiesResponse(
@@ -1285,8 +1387,8 @@ def test_list_taxonomies_pages():
RuntimeError,
)
pages = list(client.list_taxonomies(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -1295,9 +1397,7 @@ async def test_list_taxonomies_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_taxonomies),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_taxonomies), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1336,9 +1436,7 @@ async def test_list_taxonomies_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_taxonomies),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_taxonomies), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1362,10 +1460,10 @@ async def test_list_taxonomies_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.list_taxonomies(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.list_taxonomies(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
def test_get_taxonomy(
@@ -1380,7 +1478,7 @@ def test_get_taxonomy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_taxonomy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.Taxonomy(
name="name_value",
@@ -1400,6 +1498,7 @@ def test_get_taxonomy(
assert args[0] == policytagmanager.GetTaxonomyRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policytagmanager.Taxonomy)
assert response.name == "name_value"
@@ -1418,19 +1517,19 @@ def test_get_taxonomy_from_dict():
@pytest.mark.asyncio
-async def test_get_taxonomy_async(transport: str = "grpc_asyncio"):
+async def test_get_taxonomy_async(
+ transport: str = "grpc_asyncio", request_type=policytagmanager.GetTaxonomyRequest
+):
client = PolicyTagManagerAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = policytagmanager.GetTaxonomyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_taxonomy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.Taxonomy(
@@ -1449,7 +1548,7 @@ async def test_get_taxonomy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == policytagmanager.GetTaxonomyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policytagmanager.Taxonomy)
@@ -1465,6 +1564,11 @@ async def test_get_taxonomy_async(transport: str = "grpc_asyncio"):
]
+@pytest.mark.asyncio
+async def test_get_taxonomy_async_from_dict():
+ await test_get_taxonomy_async(request_type=dict)
+
+
def test_get_taxonomy_field_headers():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
@@ -1474,7 +1578,7 @@ def test_get_taxonomy_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_taxonomy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call:
call.return_value = policytagmanager.Taxonomy()
client.get_taxonomy(request)
@@ -1501,9 +1605,7 @@ async def test_get_taxonomy_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_taxonomy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.Taxonomy()
)
@@ -1524,7 +1626,7 @@ def test_get_taxonomy_flattened():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_taxonomy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.Taxonomy()
@@ -1558,9 +1660,7 @@ async def test_get_taxonomy_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_taxonomy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_taxonomy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.Taxonomy()
@@ -1606,7 +1706,7 @@ def test_create_policy_tag(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_policy_tag), "__call__"
+ type(client.transport.create_policy_tag), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.PolicyTag(
@@ -1626,6 +1726,7 @@ def test_create_policy_tag(
assert args[0] == policytagmanager.CreatePolicyTagRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policytagmanager.PolicyTag)
assert response.name == "name_value"
@@ -1644,18 +1745,21 @@ def test_create_policy_tag_from_dict():
@pytest.mark.asyncio
-async def test_create_policy_tag_async(transport: str = "grpc_asyncio"):
+async def test_create_policy_tag_async(
+ transport: str = "grpc_asyncio",
+ request_type=policytagmanager.CreatePolicyTagRequest,
+):
client = PolicyTagManagerAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = policytagmanager.CreatePolicyTagRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_policy_tag), "__call__"
+ type(client.transport.create_policy_tag), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1674,7 +1778,7 @@ async def test_create_policy_tag_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == policytagmanager.CreatePolicyTagRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policytagmanager.PolicyTag)
@@ -1690,6 +1794,11 @@ async def test_create_policy_tag_async(transport: str = "grpc_asyncio"):
assert response.child_policy_tags == ["child_policy_tags_value"]
+@pytest.mark.asyncio
+async def test_create_policy_tag_async_from_dict():
+ await test_create_policy_tag_async(request_type=dict)
+
+
def test_create_policy_tag_field_headers():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
@@ -1700,7 +1809,7 @@ def test_create_policy_tag_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_policy_tag), "__call__"
+ type(client.transport.create_policy_tag), "__call__"
) as call:
call.return_value = policytagmanager.PolicyTag()
@@ -1729,7 +1838,7 @@ async def test_create_policy_tag_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_policy_tag), "__call__"
+ type(client.transport.create_policy_tag), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.PolicyTag()
@@ -1752,7 +1861,7 @@ def test_create_policy_tag_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_policy_tag), "__call__"
+ type(client.transport.create_policy_tag), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.PolicyTag()
@@ -1795,7 +1904,7 @@ async def test_create_policy_tag_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_policy_tag), "__call__"
+ type(client.transport.create_policy_tag), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.PolicyTag()
@@ -1849,7 +1958,7 @@ def test_delete_policy_tag(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_policy_tag), "__call__"
+ type(client.transport.delete_policy_tag), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1871,18 +1980,21 @@ def test_delete_policy_tag_from_dict():
@pytest.mark.asyncio
-async def test_delete_policy_tag_async(transport: str = "grpc_asyncio"):
+async def test_delete_policy_tag_async(
+ transport: str = "grpc_asyncio",
+ request_type=policytagmanager.DeletePolicyTagRequest,
+):
client = PolicyTagManagerAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = policytagmanager.DeletePolicyTagRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_policy_tag), "__call__"
+ type(client.transport.delete_policy_tag), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -1893,12 +2005,17 @@ async def test_delete_policy_tag_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == policytagmanager.DeletePolicyTagRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_policy_tag_async_from_dict():
+ await test_delete_policy_tag_async(request_type=dict)
+
+
def test_delete_policy_tag_field_headers():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
@@ -1909,7 +2026,7 @@ def test_delete_policy_tag_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_policy_tag), "__call__"
+ type(client.transport.delete_policy_tag), "__call__"
) as call:
call.return_value = None
@@ -1938,7 +2055,7 @@ async def test_delete_policy_tag_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_policy_tag), "__call__"
+ type(client.transport.delete_policy_tag), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -1959,7 +2076,7 @@ def test_delete_policy_tag_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_policy_tag), "__call__"
+ type(client.transport.delete_policy_tag), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1995,7 +2112,7 @@ async def test_delete_policy_tag_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_policy_tag), "__call__"
+ type(client.transport.delete_policy_tag), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -2040,7 +2157,7 @@ def test_update_policy_tag(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_policy_tag), "__call__"
+ type(client.transport.update_policy_tag), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.PolicyTag(
@@ -2060,6 +2177,7 @@ def test_update_policy_tag(
assert args[0] == policytagmanager.UpdatePolicyTagRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policytagmanager.PolicyTag)
assert response.name == "name_value"
@@ -2078,18 +2196,21 @@ def test_update_policy_tag_from_dict():
@pytest.mark.asyncio
-async def test_update_policy_tag_async(transport: str = "grpc_asyncio"):
+async def test_update_policy_tag_async(
+ transport: str = "grpc_asyncio",
+ request_type=policytagmanager.UpdatePolicyTagRequest,
+):
client = PolicyTagManagerAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = policytagmanager.UpdatePolicyTagRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_policy_tag), "__call__"
+ type(client.transport.update_policy_tag), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -2108,7 +2229,7 @@ async def test_update_policy_tag_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == policytagmanager.UpdatePolicyTagRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policytagmanager.PolicyTag)
@@ -2124,6 +2245,11 @@ async def test_update_policy_tag_async(transport: str = "grpc_asyncio"):
assert response.child_policy_tags == ["child_policy_tags_value"]
+@pytest.mark.asyncio
+async def test_update_policy_tag_async_from_dict():
+ await test_update_policy_tag_async(request_type=dict)
+
+
def test_update_policy_tag_field_headers():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
@@ -2134,7 +2260,7 @@ def test_update_policy_tag_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_policy_tag), "__call__"
+ type(client.transport.update_policy_tag), "__call__"
) as call:
call.return_value = policytagmanager.PolicyTag()
@@ -2165,7 +2291,7 @@ async def test_update_policy_tag_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_policy_tag), "__call__"
+ type(client.transport.update_policy_tag), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.PolicyTag()
@@ -2190,7 +2316,7 @@ def test_update_policy_tag_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.update_policy_tag), "__call__"
+ type(client.transport.update_policy_tag), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.PolicyTag()
@@ -2229,7 +2355,7 @@ async def test_update_policy_tag_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.update_policy_tag), "__call__"
+ type(client.transport.update_policy_tag), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.PolicyTag()
@@ -2278,9 +2404,7 @@ def test_list_policy_tags(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_policy_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.ListPolicyTagsResponse(
next_page_token="next_page_token_value",
@@ -2295,6 +2419,7 @@ def test_list_policy_tags(
assert args[0] == policytagmanager.ListPolicyTagsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListPolicyTagsPager)
assert response.next_page_token == "next_page_token_value"
@@ -2305,19 +2430,19 @@ def test_list_policy_tags_from_dict():
@pytest.mark.asyncio
-async def test_list_policy_tags_async(transport: str = "grpc_asyncio"):
+async def test_list_policy_tags_async(
+ transport: str = "grpc_asyncio", request_type=policytagmanager.ListPolicyTagsRequest
+):
client = PolicyTagManagerAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = policytagmanager.ListPolicyTagsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_policy_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.ListPolicyTagsResponse(
@@ -2331,7 +2456,7 @@ async def test_list_policy_tags_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == policytagmanager.ListPolicyTagsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListPolicyTagsAsyncPager)
@@ -2339,6 +2464,11 @@ async def test_list_policy_tags_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_policy_tags_async_from_dict():
+ await test_list_policy_tags_async(request_type=dict)
+
+
def test_list_policy_tags_field_headers():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
@@ -2348,9 +2478,7 @@ def test_list_policy_tags_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_policy_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call:
call.return_value = policytagmanager.ListPolicyTagsResponse()
client.list_policy_tags(request)
@@ -2377,9 +2505,7 @@ async def test_list_policy_tags_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_policy_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.ListPolicyTagsResponse()
)
@@ -2400,9 +2526,7 @@ def test_list_policy_tags_flattened():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_policy_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.ListPolicyTagsResponse()
@@ -2436,9 +2560,7 @@ async def test_list_policy_tags_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_policy_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.ListPolicyTagsResponse()
@@ -2475,9 +2597,7 @@ def test_list_policy_tags_pager():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_policy_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
policytagmanager.ListPolicyTagsResponse(
@@ -2520,9 +2640,7 @@ def test_list_policy_tags_pages():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.list_policy_tags), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_policy_tags), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
policytagmanager.ListPolicyTagsResponse(
@@ -2548,8 +2666,8 @@ def test_list_policy_tags_pages():
RuntimeError,
)
pages = list(client.list_policy_tags(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -2558,9 +2676,7 @@ async def test_list_policy_tags_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_policy_tags),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_policy_tags), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -2602,9 +2718,7 @@ async def test_list_policy_tags_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_policy_tags),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_policy_tags), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -2631,10 +2745,10 @@ async def test_list_policy_tags_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.list_policy_tags(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.list_policy_tags(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
def test_get_policy_tag(
@@ -2649,7 +2763,7 @@ def test_get_policy_tag(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_policy_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.PolicyTag(
name="name_value",
@@ -2668,6 +2782,7 @@ def test_get_policy_tag(
assert args[0] == policytagmanager.GetPolicyTagRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policytagmanager.PolicyTag)
assert response.name == "name_value"
@@ -2686,19 +2801,19 @@ def test_get_policy_tag_from_dict():
@pytest.mark.asyncio
-async def test_get_policy_tag_async(transport: str = "grpc_asyncio"):
+async def test_get_policy_tag_async(
+ transport: str = "grpc_asyncio", request_type=policytagmanager.GetPolicyTagRequest
+):
client = PolicyTagManagerAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = policytagmanager.GetPolicyTagRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_policy_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.PolicyTag(
@@ -2716,7 +2831,7 @@ async def test_get_policy_tag_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == policytagmanager.GetPolicyTagRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policytagmanager.PolicyTag)
@@ -2732,6 +2847,11 @@ async def test_get_policy_tag_async(transport: str = "grpc_asyncio"):
assert response.child_policy_tags == ["child_policy_tags_value"]
+@pytest.mark.asyncio
+async def test_get_policy_tag_async_from_dict():
+ await test_get_policy_tag_async(request_type=dict)
+
+
def test_get_policy_tag_field_headers():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
@@ -2741,7 +2861,7 @@ def test_get_policy_tag_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_policy_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call:
call.return_value = policytagmanager.PolicyTag()
client.get_policy_tag(request)
@@ -2768,9 +2888,7 @@ async def test_get_policy_tag_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_policy_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanager.PolicyTag()
)
@@ -2791,7 +2909,7 @@ def test_get_policy_tag_flattened():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_policy_tag), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.PolicyTag()
@@ -2825,9 +2943,7 @@ async def test_get_policy_tag_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_policy_tag), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_policy_tag), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanager.PolicyTag()
@@ -2872,7 +2988,7 @@ def test_get_iam_policy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
@@ -2885,6 +3001,7 @@ def test_get_iam_policy(
assert args[0] == iam_policy.GetIamPolicyRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policy.Policy)
assert response.version == 774
@@ -2897,19 +3014,19 @@ def test_get_iam_policy_from_dict():
@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
+async def test_get_iam_policy_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest
+):
client = PolicyTagManagerAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.GetIamPolicyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy.Policy(version=774, etag=b"etag_blob",)
@@ -2921,7 +3038,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.GetIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
@@ -2931,6 +3048,11 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_from_dict():
+ await test_get_iam_policy_async(request_type=dict)
+
+
def test_get_iam_policy_field_headers():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
@@ -2940,7 +3062,7 @@ def test_get_iam_policy_field_headers():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
call.return_value = policy.Policy()
client.get_iam_policy(request)
@@ -2967,9 +3089,7 @@ async def test_get_iam_policy_field_headers_async():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
await client.get_iam_policy(request)
@@ -2984,10 +3104,10 @@ async def test_get_iam_policy_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_get_iam_policy_from_dict():
+def test_get_iam_policy_from_dict_foreign():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -3012,7 +3132,7 @@ def test_set_iam_policy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
@@ -3025,6 +3145,7 @@ def test_set_iam_policy(
assert args[0] == iam_policy.SetIamPolicyRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policy.Policy)
assert response.version == 774
@@ -3037,19 +3158,19 @@ def test_set_iam_policy_from_dict():
@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+async def test_set_iam_policy_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest
+):
client = PolicyTagManagerAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.SetIamPolicyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy.Policy(version=774, etag=b"etag_blob",)
@@ -3061,7 +3182,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.SetIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
@@ -3071,6 +3192,11 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_set_iam_policy_async_from_dict():
+ await test_set_iam_policy_async(request_type=dict)
+
+
def test_set_iam_policy_field_headers():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
@@ -3080,7 +3206,7 @@ def test_set_iam_policy_field_headers():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
call.return_value = policy.Policy()
client.set_iam_policy(request)
@@ -3107,9 +3233,7 @@ async def test_set_iam_policy_field_headers_async():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
await client.set_iam_policy(request)
@@ -3124,10 +3248,10 @@ async def test_set_iam_policy_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_set_iam_policy_from_dict():
+def test_set_iam_policy_from_dict_foreign():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -3153,7 +3277,7 @@ def test_test_iam_permissions(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse(
@@ -3169,6 +3293,7 @@ def test_test_iam_permissions(
assert args[0] == iam_policy.TestIamPermissionsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, iam_policy.TestIamPermissionsResponse)
assert response.permissions == ["permissions_value"]
@@ -3179,18 +3304,20 @@ def test_test_iam_permissions_from_dict():
@pytest.mark.asyncio
-async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
+async def test_test_iam_permissions_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest
+):
client = PolicyTagManagerAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.TestIamPermissionsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -3203,7 +3330,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.TestIamPermissionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, iam_policy.TestIamPermissionsResponse)
@@ -3211,6 +3338,11 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
assert response.permissions == ["permissions_value"]
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_from_dict():
+ await test_test_iam_permissions_async(request_type=dict)
+
+
def test_test_iam_permissions_field_headers():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
@@ -3221,7 +3353,7 @@ def test_test_iam_permissions_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -3250,7 +3382,7 @@ async def test_test_iam_permissions_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
iam_policy.TestIamPermissionsResponse()
@@ -3268,11 +3400,11 @@ async def test_test_iam_permissions_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_test_iam_permissions_from_dict():
+def test_test_iam_permissions_from_dict_foreign():
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -3322,7 +3454,7 @@ def test_transport_instance():
credentials=credentials.AnonymousCredentials(),
)
client = PolicyTagManagerClient(transport=transport)
- assert client._transport is transport
+ assert client.transport is transport
def test_transport_get_channel():
@@ -3340,10 +3472,25 @@ def test_transport_get_channel():
assert channel
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.PolicyTagManagerGrpcTransport,
+ transports.PolicyTagManagerGrpcAsyncIOTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = PolicyTagManagerClient(credentials=credentials.AnonymousCredentials(),)
- assert isinstance(client._transport, transports.PolicyTagManagerGrpcTransport,)
+ assert isinstance(client.transport, transports.PolicyTagManagerGrpcTransport,)
def test_policy_tag_manager_base_transport_error():
@@ -3406,6 +3553,17 @@ def test_policy_tag_manager_base_transport_with_credentials_file():
)
+def test_policy_tag_manager_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.datacatalog_v1beta1.services.policy_tag_manager.transports.PolicyTagManagerTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.PolicyTagManagerTransport()
+ adc.assert_called_once()
+
+
def test_policy_tag_manager_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(auth, "default") as adc:
@@ -3438,7 +3596,7 @@ def test_policy_tag_manager_host_no_port():
api_endpoint="datacatalog.googleapis.com"
),
)
- assert client._transport._host == "datacatalog.googleapis.com:443"
+ assert client.transport._host == "datacatalog.googleapis.com:443"
def test_policy_tag_manager_host_with_port():
@@ -3448,191 +3606,154 @@ def test_policy_tag_manager_host_with_port():
api_endpoint="datacatalog.googleapis.com:8000"
),
)
- assert client._transport._host == "datacatalog.googleapis.com:8000"
+ assert client.transport._host == "datacatalog.googleapis.com:8000"
def test_policy_tag_manager_grpc_transport_channel():
channel = grpc.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.PolicyTagManagerGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
+ assert transport._ssl_channel_credentials is None
def test_policy_tag_manager_grpc_asyncio_transport_channel():
channel = aio.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.PolicyTagManagerGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_policy_tag_manager_grpc_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
-
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- transport = transports.PolicyTagManagerGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_policy_tag_manager_grpc_asyncio_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
-
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- transport = transports.PolicyTagManagerGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials is None
@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+ "transport_class",
+ [
+ transports.PolicyTagManagerGrpcTransport,
+ transports.PolicyTagManagerGrpcAsyncIOTransport,
+ ],
)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_policy_tag_manager_grpc_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
+def test_policy_tag_manager_transport_channel_mtls_with_client_cert_source(
+ transport_class,
):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- # Mock google.auth.transport.grpc.SslCredentials class.
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- mock_cred = mock.Mock()
- transport = transports.PolicyTagManagerGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+ "transport_class",
+ [
+ transports.PolicyTagManagerGrpcTransport,
+ transports.PolicyTagManagerGrpcAsyncIOTransport,
+ ],
)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_policy_tag_manager_grpc_asyncio_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
-):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- # Mock google.auth.transport.grpc.SslCredentials class.
+def test_policy_tag_manager_transport_channel_mtls_with_adc(transport_class):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
- mock_cred = mock.Mock()
- transport = transports.PolicyTagManagerGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
-def test_taxonomy_path():
+
+def test_policy_tag_path():
project = "squid"
location = "clam"
taxonomy = "whelk"
+ policy_tag = "octopus"
+
+ expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format(
+ project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag,
+ )
+ actual = PolicyTagManagerClient.policy_tag_path(
+ project, location, taxonomy, policy_tag
+ )
+ assert expected == actual
+
+
+def test_parse_policy_tag_path():
+ expected = {
+ "project": "oyster",
+ "location": "nudibranch",
+ "taxonomy": "cuttlefish",
+ "policy_tag": "mussel",
+ }
+ path = PolicyTagManagerClient.policy_tag_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PolicyTagManagerClient.parse_policy_tag_path(path)
+ assert expected == actual
+
+
+def test_taxonomy_path():
+ project = "winkle"
+ location = "nautilus"
+ taxonomy = "scallop"
expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(
project=project, location=location, taxonomy=taxonomy,
@@ -3643,9 +3764,9 @@ def test_taxonomy_path():
def test_parse_taxonomy_path():
expected = {
- "project": "octopus",
- "location": "oyster",
- "taxonomy": "nudibranch",
+ "project": "abalone",
+ "location": "squid",
+ "taxonomy": "clam",
}
path = PolicyTagManagerClient.taxonomy_path(**expected)
@@ -3654,30 +3775,123 @@ def test_parse_taxonomy_path():
assert expected == actual
-def test_policy_tag_path():
- project = "squid"
- location = "clam"
- taxonomy = "whelk"
- policy_tag = "octopus"
+def test_common_billing_account_path():
+ billing_account = "whelk"
- expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}/policyTags/{policy_tag}".format(
- project=project, location=location, taxonomy=taxonomy, policy_tag=policy_tag,
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
)
- actual = PolicyTagManagerClient.policy_tag_path(
- project, location, taxonomy, policy_tag
+ actual = PolicyTagManagerClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "octopus",
+ }
+ path = PolicyTagManagerClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PolicyTagManagerClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "oyster"
+
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = PolicyTagManagerClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "nudibranch",
+ }
+ path = PolicyTagManagerClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PolicyTagManagerClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "cuttlefish"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = PolicyTagManagerClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "mussel",
+ }
+ path = PolicyTagManagerClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PolicyTagManagerClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "winkle"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = PolicyTagManagerClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "nautilus",
+ }
+ path = PolicyTagManagerClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PolicyTagManagerClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "scallop"
+ location = "abalone"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
)
+ actual = PolicyTagManagerClient.common_location_path(project, location)
assert expected == actual
-def test_parse_policy_tag_path():
+def test_parse_common_location_path():
expected = {
- "project": "oyster",
- "location": "nudibranch",
- "taxonomy": "cuttlefish",
- "policy_tag": "mussel",
+ "project": "squid",
+ "location": "clam",
}
- path = PolicyTagManagerClient.policy_tag_path(**expected)
+ path = PolicyTagManagerClient.common_location_path(**expected)
# Check that the path construction is reversible.
- actual = PolicyTagManagerClient.parse_policy_tag_path(path)
+ actual = PolicyTagManagerClient.parse_common_location_path(path)
assert expected == actual
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+ client_info = gapic_v1.client_info.ClientInfo()
+
+ with mock.patch.object(
+ transports.PolicyTagManagerTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = PolicyTagManagerClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.PolicyTagManagerTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = PolicyTagManagerClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
+ )
+ prep.assert_called_once_with(client_info)
diff --git a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py
index 9676d368..cb5be9a9 100644
--- a/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py
+++ b/tests/unit/gapic/datacatalog_v1beta1/test_policy_tag_manager_serialization.py
@@ -27,6 +27,7 @@
from google import auth
from google.api_core import client_options
from google.api_core import exceptions
+from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.auth import credentials
@@ -107,12 +108,12 @@ def test_policy_tag_manager_serialization_client_from_service_account_file(
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
client = client_class.from_service_account_json("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
- assert client._transport._host == "datacatalog.googleapis.com:443"
+ assert client.transport._host == "datacatalog.googleapis.com:443"
def test_policy_tag_manager_serialization_client_get_transport_class():
@@ -176,14 +177,14 @@ def test_policy_tag_manager_serialization_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -192,14 +193,14 @@ def test_policy_tag_manager_serialization_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -208,90 +209,185 @@ def test_policy_tag_manager_serialization_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (
+ PolicyTagManagerSerializationClient,
+ transports.PolicyTagManagerSerializationGrpcTransport,
+ "grpc",
+ "true",
+ ),
+ (
+ PolicyTagManagerSerializationAsyncClient,
+ transports.PolicyTagManagerSerializationGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (
+ PolicyTagManagerSerializationClient,
+ transports.PolicyTagManagerSerializationGrpcTransport,
+ "grpc",
+ "false",
+ ),
+ (
+ PolicyTagManagerSerializationAsyncClient,
+ transports.PolicyTagManagerSerializationGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ PolicyTagManagerSerializationClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(PolicyTagManagerSerializationClient),
+)
+@mock.patch.object(
+ PolicyTagManagerSerializationAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(PolicyTagManagerSerializationAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_policy_tag_manager_serialization_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=client_cert_source_callback,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and default_client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
- with mock.patch.object(transport_class, "__init__") as patched:
+ ssl_channel_creds = mock.Mock()
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=True,
+ "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
):
patched.return_value = None
- client = client_class()
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_ssl_channel_creds = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_ssl_channel_creds = ssl_channel_creds
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
+ host=expected_host,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", but client_cert_source and default_client_cert_source are None.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=False,
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
):
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has
- # unsupported value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError):
- client = client_class()
-
- # Check the case quota_project_id is provided
- options = client_options.ClientOptions(quota_project_id="octopus")
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id="octopus",
- )
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.ssl_credentials",
+ new_callable=mock.PropertyMock,
+ ) as ssl_credentials_mock:
+ if use_client_cert_env == "false":
+ is_mtls_mock.return_value = False
+ ssl_credentials_mock.return_value = None
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_ssl_channel_creds = None
+ else:
+ is_mtls_mock.return_value = True
+ ssl_credentials_mock.return_value = mock.Mock()
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_ssl_channel_creds = (
+ ssl_credentials_mock.return_value
+ )
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ ):
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ is_mtls_mock.return_value = False
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
@pytest.mark.parametrize(
@@ -322,9 +418,9 @@ def test_policy_tag_manager_serialization_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -356,9 +452,9 @@ def test_policy_tag_manager_serialization_client_client_options_credentials_file
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -375,9 +471,9 @@ def test_policy_tag_manager_serialization_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -395,7 +491,7 @@ def test_import_taxonomies(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.import_taxonomies), "__call__"
+ type(client.transport.import_taxonomies), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse()
@@ -409,6 +505,7 @@ def test_import_taxonomies(
assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse)
@@ -417,18 +514,21 @@ def test_import_taxonomies_from_dict():
@pytest.mark.asyncio
-async def test_import_taxonomies_async(transport: str = "grpc_asyncio"):
+async def test_import_taxonomies_async(
+ transport: str = "grpc_asyncio",
+ request_type=policytagmanagerserialization.ImportTaxonomiesRequest,
+):
client = PolicyTagManagerSerializationAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = policytagmanagerserialization.ImportTaxonomiesRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.import_taxonomies), "__call__"
+ type(client.transport.import_taxonomies), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -441,12 +541,17 @@ async def test_import_taxonomies_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == policytagmanagerserialization.ImportTaxonomiesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policytagmanagerserialization.ImportTaxonomiesResponse)
+@pytest.mark.asyncio
+async def test_import_taxonomies_async_from_dict():
+ await test_import_taxonomies_async(request_type=dict)
+
+
def test_import_taxonomies_field_headers():
client = PolicyTagManagerSerializationClient(
credentials=credentials.AnonymousCredentials(),
@@ -459,7 +564,7 @@ def test_import_taxonomies_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.import_taxonomies), "__call__"
+ type(client.transport.import_taxonomies), "__call__"
) as call:
call.return_value = policytagmanagerserialization.ImportTaxonomiesResponse()
@@ -488,7 +593,7 @@ async def test_import_taxonomies_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.import_taxonomies), "__call__"
+ type(client.transport.import_taxonomies), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanagerserialization.ImportTaxonomiesResponse()
@@ -520,7 +625,7 @@ def test_export_taxonomies(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.export_taxonomies), "__call__"
+ type(client.transport.export_taxonomies), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse()
@@ -534,6 +639,7 @@ def test_export_taxonomies(
assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse)
@@ -542,18 +648,21 @@ def test_export_taxonomies_from_dict():
@pytest.mark.asyncio
-async def test_export_taxonomies_async(transport: str = "grpc_asyncio"):
+async def test_export_taxonomies_async(
+ transport: str = "grpc_asyncio",
+ request_type=policytagmanagerserialization.ExportTaxonomiesRequest,
+):
client = PolicyTagManagerSerializationAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = policytagmanagerserialization.ExportTaxonomiesRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.export_taxonomies), "__call__"
+ type(client.transport.export_taxonomies), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -566,12 +675,17 @@ async def test_export_taxonomies_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == policytagmanagerserialization.ExportTaxonomiesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policytagmanagerserialization.ExportTaxonomiesResponse)
+@pytest.mark.asyncio
+async def test_export_taxonomies_async_from_dict():
+ await test_export_taxonomies_async(request_type=dict)
+
+
def test_export_taxonomies_field_headers():
client = PolicyTagManagerSerializationClient(
credentials=credentials.AnonymousCredentials(),
@@ -584,7 +698,7 @@ def test_export_taxonomies_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.export_taxonomies), "__call__"
+ type(client.transport.export_taxonomies), "__call__"
) as call:
call.return_value = policytagmanagerserialization.ExportTaxonomiesResponse()
@@ -613,7 +727,7 @@ async def test_export_taxonomies_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.export_taxonomies), "__call__"
+ type(client.transport.export_taxonomies), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policytagmanagerserialization.ExportTaxonomiesResponse()
@@ -667,7 +781,7 @@ def test_transport_instance():
credentials=credentials.AnonymousCredentials(),
)
client = PolicyTagManagerSerializationClient(transport=transport)
- assert client._transport is transport
+ assert client.transport is transport
def test_transport_get_channel():
@@ -685,13 +799,28 @@ def test_transport_get_channel():
assert channel
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.PolicyTagManagerSerializationGrpcTransport,
+ transports.PolicyTagManagerSerializationGrpcAsyncIOTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = PolicyTagManagerSerializationClient(
credentials=credentials.AnonymousCredentials(),
)
assert isinstance(
- client._transport, transports.PolicyTagManagerSerializationGrpcTransport,
+ client.transport, transports.PolicyTagManagerSerializationGrpcTransport,
)
@@ -744,6 +873,17 @@ def test_policy_tag_manager_serialization_base_transport_with_credentials_file()
)
+def test_policy_tag_manager_serialization_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.datacatalog_v1beta1.services.policy_tag_manager_serialization.transports.PolicyTagManagerSerializationTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.PolicyTagManagerSerializationTransport()
+ adc.assert_called_once()
+
+
def test_policy_tag_manager_serialization_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(auth, "default") as adc:
@@ -776,7 +916,7 @@ def test_policy_tag_manager_serialization_host_no_port():
api_endpoint="datacatalog.googleapis.com"
),
)
- assert client._transport._host == "datacatalog.googleapis.com:443"
+ assert client.transport._host == "datacatalog.googleapis.com:443"
def test_policy_tag_manager_serialization_host_with_port():
@@ -786,182 +926,269 @@ def test_policy_tag_manager_serialization_host_with_port():
api_endpoint="datacatalog.googleapis.com:8000"
),
)
- assert client._transport._host == "datacatalog.googleapis.com:8000"
+ assert client.transport._host == "datacatalog.googleapis.com:8000"
def test_policy_tag_manager_serialization_grpc_transport_channel():
channel = grpc.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.PolicyTagManagerSerializationGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
+ assert transport._ssl_channel_credentials == None
def test_policy_tag_manager_serialization_grpc_asyncio_transport_channel():
channel = aio.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
+ assert transport._ssl_channel_credentials == None
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_policy_tag_manager_serialization_grpc_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.PolicyTagManagerSerializationGrpcTransport,
+ transports.PolicyTagManagerSerializationGrpcAsyncIOTransport,
+ ],
+)
+def test_policy_tag_manager_serialization_transport_channel_mtls_with_client_cert_source(
+ transport_class,
):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.PolicyTagManagerSerializationGrpcTransport,
+ transports.PolicyTagManagerSerializationGrpcAsyncIOTransport,
+ ],
+)
+def test_policy_tag_manager_serialization_transport_channel_mtls_with_adc(
+ transport_class,
+):
mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
- transport = transports.PolicyTagManagerSerializationGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
+
+def test_taxonomy_path():
+ project = "squid"
+ location = "clam"
+ taxonomy = "whelk"
+
+ expected = "projects/{project}/locations/{location}/taxonomies/{taxonomy}".format(
+ project=project, location=location, taxonomy=taxonomy,
)
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
+ actual = PolicyTagManagerSerializationClient.taxonomy_path(
+ project, location, taxonomy
)
- assert transport.grpc_channel == mock_grpc_channel
+ assert expected == actual
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_policy_tag_manager_serialization_grpc_asyncio_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
+def test_parse_taxonomy_path():
+ expected = {
+ "project": "octopus",
+ "location": "oyster",
+ "taxonomy": "nudibranch",
+ }
+ path = PolicyTagManagerSerializationClient.taxonomy_path(**expected)
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
+ # Check that the path construction is reversible.
+ actual = PolicyTagManagerSerializationClient.parse_taxonomy_path(path)
+ assert expected == actual
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
- transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
+def test_common_billing_account_path():
+ billing_account = "cuttlefish"
+
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
)
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
+ actual = PolicyTagManagerSerializationClient.common_billing_account_path(
+ billing_account
)
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "mussel",
+ }
+ path = PolicyTagManagerSerializationClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PolicyTagManagerSerializationClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "winkle"
+
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = PolicyTagManagerSerializationClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "nautilus",
+ }
+ path = PolicyTagManagerSerializationClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PolicyTagManagerSerializationClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "scallop"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = PolicyTagManagerSerializationClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "abalone",
+ }
+ path = PolicyTagManagerSerializationClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PolicyTagManagerSerializationClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "squid"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = PolicyTagManagerSerializationClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "clam",
+ }
+ path = PolicyTagManagerSerializationClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = PolicyTagManagerSerializationClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "whelk"
+ location = "octopus"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
)
- assert transport.grpc_channel == mock_grpc_channel
+ actual = PolicyTagManagerSerializationClient.common_location_path(project, location)
+ assert expected == actual
-@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
-)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_policy_tag_manager_serialization_grpc_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
-):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
+def test_parse_common_location_path():
+ expected = {
+ "project": "oyster",
+ "location": "nudibranch",
+ }
+ path = PolicyTagManagerSerializationClient.common_location_path(**expected)
- # Mock google.auth.transport.grpc.SslCredentials class.
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- mock_cred = mock.Mock()
- transport = transports.PolicyTagManagerSerializationGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ # Check that the path construction is reversible.
+ actual = PolicyTagManagerSerializationClient.parse_common_location_path(path)
+ assert expected == actual
-@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
-)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_policy_tag_manager_serialization_grpc_asyncio_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
-):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
+def test_client_withDEFAULT_CLIENT_INFO():
+ client_info = gapic_v1.client_info.ClientInfo()
- # Mock google.auth.transport.grpc.SslCredentials class.
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- mock_cred = mock.Mock()
- transport = transports.PolicyTagManagerSerializationGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
+ with mock.patch.object(
+ transports.PolicyTagManagerSerializationTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = PolicyTagManagerSerializationClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
)
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.PolicyTagManagerSerializationTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = PolicyTagManagerSerializationClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
)
- assert transport.grpc_channel == mock_grpc_channel
+ prep.assert_called_once_with(client_info)