From 4f3148e93ec3dfc9395aa38a3afc62498500a055 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Wed, 2 Dec 2020 08:59:44 -0800 Subject: [PATCH] fix!: rename fields that collide with builtins; retrieve job config for risk analysis jobs (#75) fix: retrieve job config for risk analysis jobs fix!: rename fields that collide with builtins. * `ByteContentItem.type` -> `ByteContentItem.type_` * `MetadataLocation.type` -> `MetadataLocation.type_` * `Container.type` -> `Container.type_` * `Bucket.min` -> `Bucket.min_` * `Bucket.max `-> `Bucket.max_` * `DlpJob.type` -> `DlpJob.type_` * `GetDlpJobRequest.type` -> `GetDlpJobRequest.type_` --- .github/snippet-bot.yml | 0 .github/sync-repo-settings.yaml | 13 + .kokoro/docs/common.cfg | 2 +- .kokoro/populate-secrets.sh | 43 + .kokoro/release/common.cfg | 50 +- .kokoro/samples/python3.6/common.cfg | 6 + .kokoro/samples/python3.7/common.cfg | 6 + .kokoro/samples/python3.8/common.cfg | 6 + .kokoro/test-samples.sh | 8 +- .kokoro/trampoline.sh | 15 +- CODE_OF_CONDUCT.md | 123 +- CONTRIBUTING.rst | 19 - docs/conf.py | 4 +- docs/dlp_v2/types.rst | 1 + google/cloud/dlp_v2/proto/dlp.proto | 314 ++- google/cloud/dlp_v2/proto/storage.proto | 40 +- .../services/dlp_service/async_client.py | 469 ++++- .../dlp_v2/services/dlp_service/client.py | 463 ++++- .../services/dlp_service/transports/base.py | 88 +- .../services/dlp_service/transports/grpc.py | 72 +- .../dlp_service/transports/grpc_asyncio.py | 65 +- google/cloud/dlp_v2/types/dlp.py | 528 +++-- google/cloud/dlp_v2/types/storage.py | 82 +- noxfile.py | 10 +- samples/snippets/README.rst | 25 +- samples/snippets/inspect_content.py | 2 +- samples/snippets/jobs.py | 2 +- samples/snippets/noxfile.py | 50 +- samples/snippets/redact.py | 4 +- scripts/decrypt-secrets.sh | 15 +- scripts/fixup_dlp_v2_keywords.py | 3 +- scripts/readme-gen/templates/README.tmpl.rst | 6 +- .../templates/install_deps.tmpl.rst | 5 +- synth.metadata | 107 +- tests/unit/gapic/dlp_v2/test_dlp_service.py | 1767 +++++++++++------ 35 files changed, 3113 insertions(+), 1300 deletions(-) create mode 100644 .github/snippet-bot.yml create mode 100644 .github/sync-repo-settings.yaml create mode 100755 .kokoro/populate-secrets.sh diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml new file mode 100644 index 00000000..e69de29b diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml new file mode 100644 index 00000000..808cf3c7 --- /dev/null +++ b/.github/sync-repo-settings.yaml @@ -0,0 +1,13 @@ +# https://github.com/googleapis/repo-automation-bots/tree/master/packages/sync-repo-settings +# Rules for master branch protection +branchProtectionRules: +# Identifies the protection rule pattern. Name of the branch to be protected. +# Defaults to `master` +- pattern: master + requiredStatusCheckContexts: + - 'Kokoro' + - 'cla/google' + - 'Samples - Lint' + - 'Samples - Python 3.6' + - 'Samples - Python 3.7' + - 'Samples - Python 3.8' \ No newline at end of file diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg index aa9c96fe..07296fb7 100644 --- a/.kokoro/docs/common.cfg +++ b/.kokoro/docs/common.cfg @@ -30,7 +30,7 @@ env_vars: { env_vars: { key: "V2_STAGING_BUCKET" - value: "docs-staging-v2-staging" + value: "docs-staging-v2" } # It will upload the docker image after successful builds. diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh new file mode 100755 index 00000000..f5251425 --- /dev/null +++ b/.kokoro/populate-secrets.sh @@ -0,0 +1,43 @@ +#!/bin/bash +# Copyright 2020 Google LLC. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +set -eo pipefail + +function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;} +function msg { println "$*" >&2 ;} +function println { printf '%s\n' "$(now) $*" ;} + + +# Populates requested secrets set in SECRET_MANAGER_KEYS from service account: +# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com +SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager" +msg "Creating folder on disk for secrets: ${SECRET_LOCATION}" +mkdir -p ${SECRET_LOCATION} +for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g") +do + msg "Retrieving secret ${key}" + docker run --entrypoint=gcloud \ + --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \ + gcr.io/google.com/cloudsdktool/cloud-sdk \ + secrets versions access latest \ + --project cloud-devrel-kokoro-resources \ + --secret ${key} > \ + "${SECRET_LOCATION}/${key}" + if [[ $? == 0 ]]; then + msg "Secret written to ${SECRET_LOCATION}/${key}" + else + msg "Error retrieving secret ${key}" + fi +done diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 5eb8c07e..e603ebc5 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,42 +23,18 @@ env_vars: { value: "github/python-dlp/.kokoro/release.sh" } -# Fetch the token needed for reporting release status to GitHub -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "yoshi-automation-github-key" - } - } -} - -# Fetch PyPI password -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "google_cloud_pypi_password" - } - } -} - -# Fetch magictoken to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "releasetool-magictoken" - } - } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google_cloud_pypi_password" + } + } } -# Fetch api key to use with Magic Github Proxy -before_action { - fetch_keystore { - keystore_resource { - keystore_config_id: 73713 - keyname: "magic-github-proxy-api-key" - } - } -} +# Tokens needed to report release status back to GitHub +env_vars: { + key: "SECRET_MANAGER_KEYS" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg index 79abd4cc..e29bea32 100644 --- a/.kokoro/samples/python3.6/common.cfg +++ b/.kokoro/samples/python3.6/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.6" } +# Declare build specific Cloud project. 
+env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py36" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-dlp/.kokoro/test-samples.sh" diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg index 357e36be..f2b1803d 100644 --- a/.kokoro/samples/python3.7/common.cfg +++ b/.kokoro/samples/python3.7/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.7" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py37" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-dlp/.kokoro/test-samples.sh" diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg index aefdbeac..0bfe6bf2 100644 --- a/.kokoro/samples/python3.8/common.cfg +++ b/.kokoro/samples/python3.8/common.cfg @@ -13,6 +13,12 @@ env_vars: { value: "py-3.8" } +# Declare build specific Cloud project. +env_vars: { + key: "BUILD_SPECIFIC_GCLOUD_PROJECT" + value: "python-docs-samples-tests-py38" +} + env_vars: { key: "TRAMPOLINE_BUILD_FILE" value: "github/python-dlp/.kokoro/test-samples.sh" diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh index 0d3b153a..6f17e226 100755 --- a/.kokoro/test-samples.sh +++ b/.kokoro/test-samples.sh @@ -28,6 +28,12 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then git checkout $LATEST_RELEASE fi +# Exit early if samples directory doesn't exist +if [ ! -d "./samples" ]; then + echo "No tests run. `./samples` not found" + exit 0 +fi + # Disable buffering, so that the logs stream through. export PYTHONUNBUFFERED=1 @@ -101,4 +107,4 @@ cd "$ROOT" # Workaround for Kokoro permissions issue: delete secrets rm testing/{test-env.sh,client-secrets.json,service-account.json} -exit "$RTN" \ No newline at end of file +exit "$RTN" diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh index e8c4251f..f39236e9 100755 --- a/.kokoro/trampoline.sh +++ b/.kokoro/trampoline.sh @@ -15,9 +15,14 @@ set -eo pipefail -python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$? +# Always run the cleanup script, regardless of the success of bouncing into +# the container. +function cleanup() { + chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh + echo "cleanup"; +} +trap cleanup EXIT -chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh -${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true - -exit ${ret_code} +$(dirname $0)/populate-secrets.sh # Secret Manager secrets. +python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" \ No newline at end of file diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index b3d1f602..039f4368 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -1,44 +1,95 @@ -# Contributor Code of Conduct +# Code of Conduct -As contributors and maintainers of this project, -and in the interest of fostering an open and welcoming community, -we pledge to respect all people who contribute through reporting issues, -posting feature requests, updating documentation, -submitting pull requests or patches, and other activities. +## Our Pledge -We are committed to making participation in this project -a harassment-free experience for everyone, -regardless of level of experience, gender, gender identity and expression, -sexual orientation, disability, personal appearance, -body size, race, ethnicity, age, religion, or nationality. 
+In the interest of fostering an open and welcoming environment, we as +contributors and maintainers pledge to making participation in our project and +our community a harassment-free experience for everyone, regardless of age, body +size, disability, ethnicity, gender identity and expression, level of +experience, education, socio-economic status, nationality, personal appearance, +race, religion, or sexual identity and orientation. + +## Our Standards + +Examples of behavior that contributes to creating a positive environment +include: + +* Using welcoming and inclusive language +* Being respectful of differing viewpoints and experiences +* Gracefully accepting constructive criticism +* Focusing on what is best for the community +* Showing empathy towards other community members Examples of unacceptable behavior by participants include: -* The use of sexualized language or imagery -* Personal attacks -* Trolling or insulting/derogatory comments -* Public or private harassment -* Publishing other's private information, -such as physical or electronic -addresses, without explicit permission -* Other unethical or unprofessional conduct. +* The use of sexualized language or imagery and unwelcome sexual attention or + advances +* Trolling, insulting/derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or electronic + address, without explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Our Responsibilities + +Project maintainers are responsible for clarifying the standards of acceptable +behavior and are expected to take appropriate and fair corrective action in +response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject -comments, commits, code, wiki edits, issues, and other contributions -that are not aligned to this Code of Conduct. -By adopting this Code of Conduct, -project maintainers commit themselves to fairly and consistently -applying these principles to every aspect of managing this project. -Project maintainers who do not follow or enforce the Code of Conduct -may be permanently removed from the project team. - -This code of conduct applies both within project spaces and in public spaces -when an individual is representing the project or its community. - -Instances of abusive, harassing, or otherwise unacceptable behavior -may be reported by opening an issue -or contacting one or more of the project maintainers. - -This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0, -available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/) +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, or to ban temporarily or permanently any +contributor for other behaviors that they deem inappropriate, threatening, +offensive, or harmful. + +## Scope + +This Code of Conduct applies both within project spaces and in public spaces +when an individual is representing the project or its community. Examples of +representing a project or community include using an official project e-mail +address, posting via an official social media account, or acting as an appointed +representative at an online or offline event. Representation of a project may be +further defined and clarified by project maintainers. 
+ +This Code of Conduct also applies outside the project spaces when the Project +Steward has a reasonable belief that an individual's behavior may have a +negative impact on the project or its community. + +## Conflict Resolution + +We do not believe that all conflict is bad; healthy debate and disagreement +often yield positive results. However, it is never okay to be disrespectful or +to engage in behavior that violates the project’s code of conduct. + +If you see someone violating the code of conduct, you are encouraged to address +the behavior directly with those involved. Many issues can be resolved quickly +and easily, and this gives people more control over the outcome of their +dispute. If you are unable to resolve the matter for any reason, or if the +behavior is threatening or harassing, report it. We are dedicated to providing +an environment where participants feel welcome and safe. + + +Reports should be directed to *googleapis-stewards@google.com*, the +Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to +receive and address reported violations of the code of conduct. They will then +work with a committee consisting of representatives from the Open Source +Programs Office and the Google Open Source Strategy team. If for any reason you +are uncomfortable reaching out to the Project Steward, please email +opensource@google.com. + +We will investigate every complaint, but you may not receive a direct response. +We will use our discretion in determining when and how to follow up on reported +incidents, which may range from not taking action to permanent expulsion from +the project and project-sponsored spaces. We will notify the accused of the +report and provide them an opportunity to discuss it before any action is taken. +The identity of the reporter will be omitted from the details of the report +supplied to the accused. In potentially harmful situations, such as ongoing +harassment or threats to anyone's safety, we may take action without notice. + +## Attribution + +This Code of Conduct is adapted from the Contributor Covenant, version 1.4, +available at +https://www.contributor-covenant.org/version/1/4/code-of-conduct.html \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index d2f63272..87ac6da7 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests. .. nox: https://pypi.org/project/nox/ -Note on Editable Installs / Develop Mode -======================================== - -- As mentioned previously, using ``setuptools`` in `develop mode`_ - or a ``pip`` `editable install`_ is not possible with this - library. This is because this library uses `namespace packages`_. - For context see `Issue #2316`_ and the relevant `PyPA issue`_. - - Since ``editable`` / ``develop`` mode can't be used, packages - need to be installed directly. Hence your changes to the source - tree don't get incorporated into the **already installed** - package. - -.. _namespace packages: https://www.python.org/dev/peps/pep-0420/ -.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316 -.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12 -.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode -.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs - ***************************************** I'm getting weird errors... Can you help? 
***************************************** diff --git a/docs/conf.py b/docs/conf.py index cc9cc348..b6f3b255 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -29,7 +29,7 @@ # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" +needs_sphinx = "1.5.5" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -39,6 +39,7 @@ "sphinx.ext.autosummary", "sphinx.ext.intersphinx", "sphinx.ext.coverage", + "sphinx.ext.doctest", "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", @@ -342,6 +343,7 @@ "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.io/grpc/python/", None), + "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), } diff --git a/docs/dlp_v2/types.rst b/docs/dlp_v2/types.rst index 0ce55310..5470b717 100644 --- a/docs/dlp_v2/types.rst +++ b/docs/dlp_v2/types.rst @@ -3,3 +3,4 @@ Types for Google Cloud Dlp v2 API .. automodule:: google.cloud.dlp_v2.types :members: + :show-inheritance: diff --git a/google/cloud/dlp_v2/proto/dlp.proto b/google/cloud/dlp_v2/proto/dlp.proto index 781ac1c4..3fb0c219 100644 --- a/google/cloud/dlp_v2/proto/dlp.proto +++ b/google/cloud/dlp_v2/proto/dlp.proto @@ -1154,9 +1154,22 @@ message RedactImageRequest { Color redaction_color = 3; } - // The parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // Parent resource name. + // + // The format of this value varies depending on whether you have [specified a + // processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 1 [(google.api.resource_reference) = { child_type: "dlp.googleapis.com/DlpContent" }]; @@ -1207,8 +1220,21 @@ message RedactImageResponse { // Request to de-identify a list of items. message DeidentifyContentRequest { // Parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // + // The format of this value varies depending on whether you have [specified a + // processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 1 [(google.api.resource_reference) = { child_type: "dlp.googleapis.com/DlpContent" }]; @@ -1255,9 +1281,22 @@ message DeidentifyContentResponse { // Request to re-identify an item. message ReidentifyContentRequest { - // Required. The parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // Required. Parent resource name. + // + // The format of this value varies depending on whether you have [specified a + // processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -1292,10 +1331,11 @@ message ReidentifyContentRequest { // Template to use. References an instance of `DeidentifyTemplate`. // Any configuration directly specified in `reidentify_config` or - // `inspect_config` will override those set in the template. Singular fields - // that are set in this request will replace their corresponding fields in the - // template. Repeated fields are appended. Singular sub-messages and groups - // are recursively merged. + // `inspect_config` will override those set in the template. The + // `DeidentifyTemplate` used must include only reversible transformations. + // Singular fields that are set in this request will replace their + // corresponding fields in the template. Repeated fields are appended. + // Singular sub-messages and groups are recursively merged. string reidentify_template_name = 6; // Deprecated. This field has no effect. @@ -1314,8 +1354,21 @@ message ReidentifyContentResponse { // Request to search for potentially sensitive info in a ContentItem. message InspectContentRequest { // Parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // + // The format of this value varies depending on whether you have [specified a + // processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 1 [(google.api.resource_reference) = { child_type: "dlp.googleapis.com/DlpContent" }]; @@ -1484,7 +1537,10 @@ message InfoTypeDescription { // Request for the list of infoTypes. message ListInfoTypesRequest { // The parent resource name. - // - Format:locations/[LOCATION-ID] + // + // The format of this value is as follows: + // + // locations/LOCATION_ID string parent = 4; // BCP-47 language code for localized infoType friendly @@ -1977,6 +2033,12 @@ message AnalyzeDataSourceRiskDetails { repeated DeltaPresenceEstimationHistogramBucket delta_presence_estimation_histogram = 1; } + // Risk analysis options. + message RequestedRiskAnalysisOptions { + // The job config for the risk job. + RiskAnalysisJobConfig job_config = 1; + } + // Privacy metric to compute. PrivacyMetric requested_privacy_metric = 1; @@ -2003,6 +2065,9 @@ message AnalyzeDataSourceRiskDetails { // Delta-presence result DeltaPresenceEstimationResult delta_presence_estimation_result = 9; } + + // The configuration used for this job. + RequestedRiskAnalysisOptions requested_options = 10; } // A value of a field, including its frequency. @@ -2490,6 +2555,9 @@ message CryptoReplaceFfxFpeConfig { // Number of characters must be in the range [2, 95]. // This must be encoded as ASCII. // The order of characters does not matter. + // The full list of allowed characters is: + // 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz + // ~`!@#$%^&*()_-+={[}]|\:;"'<,>.?/ string custom_alphabet = 5; // The native way to select the alphabet. Must be in the range [2, 95]. @@ -3075,10 +3143,25 @@ message Action { // Request message for CreateInspectTemplate. message CreateInspectTemplateRequest { // Required. Parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:organizations/[ORGANIZATION-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - // - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + // + // The format of this value varies depending on the scope of the request + // (project or organization) and whether you have [specified a processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + Organizations scope, location specified:
+ // `organizations/`ORG_ID`/locations/`LOCATION_ID + // + Organizations scope, no location specified (defaults to global):
+ // `organizations/`ORG_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3091,7 +3174,7 @@ message CreateInspectTemplateRequest { // The template id can contain uppercase and lowercase letters, // numbers, and hyphens; that is, it must match the regular - // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100 + // expression: `[a-zA-Z\d-_]+`. The maximum length is 100 // characters. Can be empty to allow the system to generate one. string template_id = 3; @@ -3134,10 +3217,25 @@ message GetInspectTemplateRequest { // Request message for ListInspectTemplates. message ListInspectTemplatesRequest { // Required. Parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:organizations/[ORGANIZATION-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - // - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + // + // The format of this value varies depending on the scope of the request + // (project or organization) and whether you have [specified a processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + Organizations scope, location specified:
+ // `organizations/`ORG_ID`/locations/`LOCATION_ID + // + Organizations scope, no location specified (defaults to global):
+ // `organizations/`ORG_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3198,8 +3296,21 @@ message DeleteInspectTemplateRequest { // Request message for CreateJobTrigger. message CreateJobTriggerRequest { // Required. Parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // + // The format of this value varies depending on whether you have [specified a + // processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3212,7 +3323,7 @@ message CreateJobTriggerRequest { // The trigger id can contain uppercase and lowercase letters, // numbers, and hyphens; that is, it must match the regular - // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100 + // expression: `[a-zA-Z\d-_]+`. The maximum length is 100 // characters. Can be empty to allow the system to generate one. string trigger_id = 3; @@ -3267,8 +3378,21 @@ message GetJobTriggerRequest { // Storage. message CreateDlpJobRequest { // Required. Parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // + // The format of this value varies depending on whether you have [specified a + // processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3287,7 +3411,7 @@ message CreateDlpJobRequest { // The job id can contain uppercase and lowercase letters, // numbers, and hyphens; that is, it must match the regular - // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100 + // expression: `[a-zA-Z\d-_]+`. The maximum length is 100 // characters. Can be empty to allow the system to generate one. string job_id = 4; @@ -3298,8 +3422,21 @@ message CreateDlpJobRequest { // Request message for ListJobTriggers. message ListJobTriggersRequest { // Required. Parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // + // The format of this value varies depending on whether you have [specified a + // processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3485,8 +3622,21 @@ message GetDlpJobRequest { // The request message for listing DLP jobs. message ListDlpJobsRequest { // Required. Parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // + // The format of this value varies depending on whether you have [specified a + // processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 4 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3597,10 +3747,25 @@ message DeleteDlpJobRequest { // Request message for CreateDeidentifyTemplate. message CreateDeidentifyTemplateRequest { // Required. Parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:organizations/[ORGANIZATION-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - // - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + // + // The format of this value varies depending on the scope of the request + // (project or organization) and whether you have [specified a processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + Organizations scope, location specified:
+ // `organizations/`ORG_ID`/locations/`LOCATION_ID + // + Organizations scope, no location specified (defaults to global):
+ // `organizations/`ORG_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3613,7 +3778,7 @@ message CreateDeidentifyTemplateRequest { // The template id can contain uppercase and lowercase letters, // numbers, and hyphens; that is, it must match the regular - // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100 + // expression: `[a-zA-Z\d-_]+`. The maximum length is 100 // characters. Can be empty to allow the system to generate one. string template_id = 3; @@ -3656,10 +3821,25 @@ message GetDeidentifyTemplateRequest { // Request message for ListDeidentifyTemplates. message ListDeidentifyTemplatesRequest { // Required. Parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:organizations/[ORGANIZATION-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - // - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + // + // The format of this value varies depending on the scope of the request + // (project or organization) and whether you have [specified a processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + Organizations scope, location specified:
+ // `organizations/`ORG_ID`/locations/`LOCATION_ID + // + Organizations scope, no location specified (defaults to global):
+ // `organizations/`ORG_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3836,10 +4016,25 @@ message StoredInfoType { // Request message for CreateStoredInfoType. message CreateStoredInfoTypeRequest { // Required. Parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:organizations/[ORGANIZATION-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - // - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + // + // The format of this value varies depending on the scope of the request + // (project or organization) and whether you have [specified a processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + Organizations scope, location specified:
+ // `organizations/`ORG_ID`/locations/`LOCATION_ID + // + Organizations scope, no location specified (defaults to global):
+ // `organizations/`ORG_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3852,7 +4047,7 @@ message CreateStoredInfoTypeRequest { // The storedInfoType ID can contain uppercase and lowercase letters, // numbers, and hyphens; that is, it must match the regular - // expression: `[a-zA-Z\\d-_]+`. The maximum length is 100 + // expression: `[a-zA-Z\d-_]+`. The maximum length is 100 // characters. Can be empty to allow the system to generate one. string stored_info_type_id = 3; @@ -3897,10 +4092,25 @@ message GetStoredInfoTypeRequest { // Request message for ListStoredInfoTypes. message ListStoredInfoTypesRequest { // Required. Parent resource name. - // - Format:projects/[PROJECT-ID] - // - Format:organizations/[ORGANIZATION-ID] - // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - // - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + // + // The format of this value varies depending on the scope of the request + // (project or organization) and whether you have [specified a processing + // location](https://cloud.google.com/dlp/docs/specifying-location): + // + // + Projects scope, location specified:
+ // `projects/`PROJECT_ID`/locations/`LOCATION_ID + // + Projects scope, no location specified (defaults to global):
+ // `projects/`PROJECT_ID + // + Organizations scope, location specified:
+ // `organizations/`ORG_ID`/locations/`LOCATION_ID + // + Organizations scope, no location specified (defaults to global):
+ // `organizations/`ORG_ID + // + // The following example `parent` string specifies a parent project with the + // identifier `example-project`, and specifies the `europe-west3` location + // for processing data: + // + // parent=projects/example-project/locations/europe-west3 string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { diff --git a/google/cloud/dlp_v2/proto/storage.proto b/google/cloud/dlp_v2/proto/storage.proto index b25cf4fe..6ded28b1 100644 --- a/google/cloud/dlp_v2/proto/storage.proto +++ b/google/cloud/dlp_v2/proto/storage.proto @@ -33,8 +33,8 @@ message InfoType { // Name of the information type. Either a name of your choosing when // creating a CustomInfoType, or one of the names listed // at https://cloud.google.com/dlp/docs/infotypes-reference when specifying - // a built-in type. InfoType names should conform to the pattern - // `[a-zA-Z0-9_]{1,64}`. + // a built-in type. When sending Cloud DLP results to Data Catalog, infoType + // names should conform to the pattern `[A-Za-z0-9$-_]{1,64}`. string name = 1; } @@ -425,14 +425,16 @@ message CloudStoragePath { message BigQueryOptions { // How to sample rows if not all rows are scanned. Meaningful only when used // in conjunction with either rows_limit or rows_limit_percent. If not - // specified, scanning would start from the top. + // specified, rows are scanned in the order BigQuery reads them. enum SampleMethod { SAMPLE_METHOD_UNSPECIFIED = 0; - // Scan from the top (default). + // Scan groups of rows in the order BigQuery provides (default). Multiple + // groups of rows may be scanned in parallel, so results may not appear in + // the same order the rows are read. TOP = 1; - // Randomly pick the row to start scanning. The scanned rows are contiguous. + // Randomly pick groups of rows to scan. RANDOM_START = 2; } @@ -471,27 +473,31 @@ message StorageConfig { // Configuration of the timespan of the items to include in scanning. // Currently only supported when inspecting Google Cloud Storage and BigQuery. message TimespanConfig { - // Exclude files or rows older than this value. + // Exclude files, tables, or rows older than this value. + // If not set, no lower time limit is applied. google.protobuf.Timestamp start_time = 1; - // Exclude files or rows newer than this value. - // If set to zero, no upper time limit is applied. + // Exclude files, tables, or rows newer than this value. + // If not set, no upper time limit is applied. google.protobuf.Timestamp end_time = 2; // Specification of the field containing the timestamp of scanned items. // Used for data sources like Datastore and BigQuery. // // For BigQuery: - // Required to filter out rows based on the given start and - // end times. If not specified and the table was modified between the given - // start and end times, the entire table will be scanned. - // The valid data types of the timestamp field are: `INTEGER`, `DATE`, - // `TIMESTAMP`, or `DATETIME` BigQuery column. + // If this value is not specified and the table was modified between the + // given start and end times, the entire table will be scanned. If this + // value is specified, then rows are filtered based on the given start and + // end times. Rows with a `NULL` value in the provided BigQuery column are + // skipped. + // Valid data types of the provided BigQuery column are: `INTEGER`, `DATE`, + // `TIMESTAMP`, and `DATETIME`. // - // For Datastore. - // Valid data types of the timestamp field are: `TIMESTAMP`. 
- // Datastore entity will be scanned if the timestamp property does not - // exist or its value is empty or invalid. + // For Datastore: + // If this value is specified, then entities are filtered based on the given + // start and end times. If an entity does not contain the provided timestamp + // property or contains empty or invalid values, then it is included. + // Valid data types of the provided timestamp property are: `TIMESTAMP`. FieldId timestamp_field = 3; // When the job is started by a JobTrigger we will automatically figure out diff --git a/google/cloud/dlp_v2/services/dlp_service/async_client.py b/google/cloud/dlp_v2/services/dlp_service/async_client.py index 3ffaf6b5..a8048473 100644 --- a/google/cloud/dlp_v2/services/dlp_service/async_client.py +++ b/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -33,7 +33,7 @@ from google.protobuf import field_mask_pb2 as field_mask # type: ignore from google.protobuf import timestamp_pb2 as timestamp # type: ignore -from .transports.base import DlpServiceTransport +from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport from .client import DlpServiceClient @@ -55,15 +55,62 @@ class DlpServiceAsyncClient: DEFAULT_ENDPOINT = DlpServiceClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = DlpServiceClient.DEFAULT_MTLS_ENDPOINT + deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) + parse_deidentify_template_path = staticmethod( + DlpServiceClient.parse_deidentify_template_path + ) + dlp_content_path = staticmethod(DlpServiceClient.dlp_content_path) + parse_dlp_content_path = staticmethod(DlpServiceClient.parse_dlp_content_path) + dlp_job_path = staticmethod(DlpServiceClient.dlp_job_path) + parse_dlp_job_path = staticmethod(DlpServiceClient.parse_dlp_job_path) + finding_path = staticmethod(DlpServiceClient.finding_path) + parse_finding_path = staticmethod(DlpServiceClient.parse_finding_path) + inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) + parse_inspect_template_path = staticmethod( + DlpServiceClient.parse_inspect_template_path + ) job_trigger_path = staticmethod(DlpServiceClient.job_trigger_path) + parse_job_trigger_path = staticmethod(DlpServiceClient.parse_job_trigger_path) + stored_info_type_path = staticmethod(DlpServiceClient.stored_info_type_path) + parse_stored_info_type_path = staticmethod( + DlpServiceClient.parse_stored_info_type_path + ) - inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) + common_billing_account_path = staticmethod( + DlpServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DlpServiceClient.parse_common_billing_account_path + ) - deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) + common_folder_path = staticmethod(DlpServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DlpServiceClient.parse_common_folder_path) + + common_organization_path = staticmethod(DlpServiceClient.common_organization_path) + parse_common_organization_path = staticmethod( + DlpServiceClient.parse_common_organization_path + ) + + common_project_path = staticmethod(DlpServiceClient.common_project_path) + parse_common_project_path = staticmethod(DlpServiceClient.parse_common_project_path) + + common_location_path = staticmethod(DlpServiceClient.common_location_path) + parse_common_location_path = staticmethod( + DlpServiceClient.parse_common_location_path + ) 
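The common path helpers added above compose and parse the resource-name strings that the reworked proto comments describe. A minimal sketch of how they are typically used; the `example-project` and `europe-west3` values are placeholders, and the exact string format follows the GAPIC generator convention rather than anything mandated by this patch:

    from google.cloud import dlp_v2

    # Compose a parent string such as
    # "projects/example-project/locations/europe-west3".
    parent = dlp_v2.DlpServiceClient.common_location_path(
        "example-project", "europe-west3"
    )

    # Recover the components, e.g.
    # {"project": "example-project", "location": "europe-west3"}.
    parts = dlp_v2.DlpServiceClient.parse_common_location_path(parent)
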
from_service_account_file = DlpServiceClient.from_service_account_file from_service_account_json = from_service_account_file + @property + def transport(self) -> DlpServiceTransport: + """Return the transport used by the client instance. + + Returns: + DlpServiceTransport: The transport used by the client instance. + """ + return self._client.transport + get_transport_class = functools.partial( type(DlpServiceClient).get_transport_class, type(DlpServiceClient) ) @@ -74,6 +121,7 @@ def __init__( credentials: credentials.Credentials = None, transport: Union[str, DlpServiceTransport] = "grpc_asyncio", client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the dlp service client. @@ -89,16 +137,19 @@ def __init__( client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint, this is the default value for - the environment variable) and "auto" (auto switch to the default - mTLS endpoint if client SSL credentials is present). However, - the ``api_endpoint`` property takes precedence if provided. - (2) The ``client_cert_source`` property is used to provide client - SSL credentials for mutual TLS transport. If not provided, the - default SSL credentials will be used if present. + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. 
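A hedged sketch of supplying these options explicitly rather than through the environment variables; the endpoint value is illustrative only, and Application Default Credentials are assumed:

    from google.api_core.client_options import ClientOptions
    from google.cloud import dlp_v2

    # An explicit api_endpoint takes precedence over the
    # GOOGLE_API_USE_MTLS_ENDPOINT environment variable.
    options = ClientOptions(api_endpoint="dlp.googleapis.com")
    client = dlp_v2.DlpServiceAsyncClient(client_options=options)
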
Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -106,7 +157,10 @@ def __init__( """ self._client = DlpServiceClient( - credentials=credentials, transport=transport, client_options=client_options, + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, ) async def inspect_content( @@ -160,7 +214,7 @@ async def inspect_content( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -227,7 +281,7 @@ async def redact_image( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -295,7 +349,7 @@ async def deidentify_content( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -353,7 +407,7 @@ async def reidentify_content( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -388,7 +442,11 @@ async def list_info_types( parent (:class:`str`): The parent resource name. - - Format:locations/[LOCATION-ID] + The format of this value is as follows: + + :: + + locations/LOCATION_ID This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -408,7 +466,8 @@ async def list_info_types( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -435,7 +494,7 @@ async def list_info_types( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Send the request. @@ -466,10 +525,28 @@ async def create_inspect_template( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -500,7 +577,8 @@ async def create_inspect_template( # Create or coerce a protobuf request object. 
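        # (A dict passed as ``request`` is coerced into a
        # dlp.CreateInspectTemplateRequest below; the flattened ``parent``
        # and ``inspect_template`` arguments may be used instead, but the
        # two styles are mutually exclusive.)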
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, inspect_template]): + has_flattened_params = any([parent, inspect_template]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -521,7 +599,7 @@ async def create_inspect_template( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_inspect_template, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -595,7 +673,8 @@ async def update_inspect_template( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, inspect_template, update_mask]): + has_flattened_params = any([name, inspect_template, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -618,7 +697,7 @@ async def update_inspect_template( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_inspect_template, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -679,7 +758,8 @@ async def get_inspect_template( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -706,7 +786,7 @@ async def get_inspect_template( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -741,10 +821,28 @@ async def list_inspect_templates( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -767,7 +865,8 @@ async def list_inspect_templates( # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -794,7 +893,7 @@ async def list_inspect_templates( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -850,7 +949,8 @@ async def delete_inspect_template( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -877,7 +977,7 @@ async def delete_inspect_template( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -914,10 +1014,28 @@ async def create_deidentify_template( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -946,7 +1064,8 @@ async def create_deidentify_template( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, deidentify_template]): + has_flattened_params = any([parent, deidentify_template]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -967,7 +1086,7 @@ async def create_deidentify_template( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_deidentify_template, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1039,7 +1158,8 @@ async def update_deidentify_template( # Create or coerce a protobuf request object. 
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, deidentify_template, update_mask]): + has_flattened_params = any([name, deidentify_template, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1062,7 +1182,7 @@ async def update_deidentify_template( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_deidentify_template, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1121,7 +1241,8 @@ async def get_deidentify_template( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1148,7 +1269,7 @@ async def get_deidentify_template( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1183,10 +1304,28 @@ async def list_deidentify_templates( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1209,7 +1348,8 @@ async def list_deidentify_templates( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1236,7 +1376,7 @@ async def list_deidentify_templates( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1292,7 +1432,8 @@ async def delete_deidentify_template( # Create or coerce a protobuf request object. 
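Each rewrapped RPC above now passes the module-level `DEFAULT_CLIENT_INFO` into `gapic_v1.method_async.wrap_method`. A hedged sketch of the synchronous equivalent, using a stub callable in place of a real gRPC transport method (the retry parameters are illustrative, not the library's defaults):

    from google.api_core import exceptions, gapic_v1
    from google.api_core import retry as retries

    def fake_rpc(request, timeout=None, metadata=None):
        # Stand-in for a transport method such as
        # self._client._transport.create_inspect_template.
        return {"echo": request}

    wrapped = gapic_v1.method.wrap_method(
        fake_rpc,
        default_retry=retries.Retry(
            initial=0.1,
            maximum=20.0,
            multiplier=1.3,
            predicate=retries.if_exception_type(
                exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
            ),
        ),
        default_timeout=300.0,
        client_info=gapic_v1.client_info.ClientInfo(gapic_version="0.0.0"),
    )
    print(wrapped({"name": "projects/example-project"}))
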
# Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1319,7 +1460,7 @@ async def delete_deidentify_template( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1356,8 +1497,23 @@ async def create_job_trigger( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1384,7 +1540,8 @@ async def create_job_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, job_trigger]): + has_flattened_params = any([parent, job_trigger]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1405,7 +1562,7 @@ async def create_job_trigger( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_job_trigger, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1475,7 +1632,8 @@ async def update_job_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, job_trigger, update_mask]): + has_flattened_params = any([name, job_trigger, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1498,7 +1656,7 @@ async def update_job_trigger( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_job_trigger, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1558,7 +1716,8 @@ async def hybrid_inspect_job_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
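The caller-facing effect of this guard, sketched with a hypothetical trigger name (assumes default credentials and an existing trigger):

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    name = "projects/example-project/jobTriggers/trigger-1"

    trigger = client.get_job_trigger(name=name)  # flattened field
    trigger = client.get_job_trigger(
        request=dlp_v2.GetJobTriggerRequest(name=name)  # request object
    )

    try:  # both at once is rejected by the guard shown above
        client.get_job_trigger(
            request=dlp_v2.GetJobTriggerRequest(name=name), name=name,
        )
    except ValueError as exc:
        print(exc)  # "If the `request` argument is set, ..."
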
@@ -1577,7 +1736,7 @@ async def hybrid_inspect_job_trigger( rpc = gapic_v1.method_async.wrap_method( self._client._transport.hybrid_inspect_job_trigger, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1633,7 +1792,8 @@ async def get_job_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1660,7 +1820,7 @@ async def get_job_trigger( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1694,8 +1854,23 @@ async def list_job_triggers( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1717,7 +1892,8 @@ async def list_job_triggers( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1744,7 +1920,7 @@ async def list_job_triggers( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1799,7 +1975,8 @@ async def delete_job_trigger( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
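The list methods above return pagers rather than raw responses. A sketch of consuming the async variant, assuming default credentials and a hypothetical `example-project`:

    import asyncio

    from google.cloud import dlp_v2

    async def main() -> None:
        client = dlp_v2.DlpServiceAsyncClient()
        pager = await client.list_job_triggers(parent="projects/example-project")
        async for trigger in pager:  # subsequent pages are fetched lazily
            print(trigger.name)

    asyncio.run(main())
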
@@ -1826,7 +2003,7 @@ async def delete_job_trigger( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1878,7 +2055,7 @@ async def activate_job_trigger( rpc = gapic_v1.method_async.wrap_method( self._client._transport.activate_job_trigger, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -1923,8 +2100,23 @@ async def create_dlp_job( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1956,7 +2148,8 @@ async def create_dlp_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, inspect_job, risk_job]): + has_flattened_params = any([parent, inspect_job, risk_job]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -1979,7 +2172,7 @@ async def create_dlp_job( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_dlp_job, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2016,8 +2209,23 @@ async def list_dlp_jobs( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2040,7 +2248,8 @@ async def list_dlp_jobs( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
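This region touches both headline fixes: `create_dlp_job` accepts a flattened `risk_job` config, and the enum-typed `type` field on `DlpJob` is now `type_`. A sketch combining the two, with a hypothetical BigQuery table:

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    risk_job = dlp_v2.RiskAnalysisJobConfig(
        privacy_metric=dlp_v2.PrivacyMetric(
            categorical_stats_config=dlp_v2.PrivacyMetric.CategoricalStatsConfig(
                field=dlp_v2.FieldId(name="age"),
            ),
        ),
        source_table=dlp_v2.BigQueryTable(
            project_id="example-project", dataset_id="demo", table_id="people",
        ),
    )
    job = client.create_dlp_job(parent="projects/example-project", risk_job=risk_job)

    # `type` collided with the Python builtin, so the field is now `type_`.
    if job.type_ == dlp_v2.DlpJobType.RISK_ANALYSIS_JOB:
        print(job.name, job.state)
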
@@ -2067,7 +2276,7 @@ async def list_dlp_jobs( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2128,7 +2337,8 @@ async def get_dlp_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2155,7 +2365,7 @@ async def get_dlp_job( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2206,7 +2416,8 @@ async def delete_dlp_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2233,7 +2444,7 @@ async def delete_dlp_job( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2282,7 +2493,7 @@ async def cancel_dlp_job( rpc = gapic_v1.method_async.wrap_method( self._client._transport.cancel_dlp_job, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2318,10 +2529,28 @@ async def create_stored_info_type( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2348,7 +2577,8 @@ async def create_stored_info_type( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent, config]): + has_flattened_params = any([parent, config]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
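A short migration sketch for the other builtin-colliding renames listed in the commit message (`ByteContentItem.type_`, `Bucket.min_`/`max_`); the values are illustrative:

    from google.cloud import dlp_v2

    # Before this change: ByteContentItem(type=..., data=...)
    item = dlp_v2.ByteContentItem(
        type_=dlp_v2.ByteContentItem.BytesType.TEXT_UTF8,
        data=b"My phone number is (415) 555-0100",
    )

    # Before this change: Bucket(min=..., max=...)
    bucket = dlp_v2.Bucket(
        min_=dlp_v2.Value(integer_value=18),
        max_=dlp_v2.Value(integer_value=65),
        replacement_value=dlp_v2.Value(string_value="18-64"),
    )
    print(item.type_, bucket.min_.integer_value)
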
@@ -2369,7 +2599,7 @@ async def create_stored_info_type( rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_stored_info_type, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2444,7 +2674,8 @@ async def update_stored_info_type( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name, config, update_mask]): + has_flattened_params = any([name, config, update_mask]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2467,7 +2698,7 @@ async def update_stored_info_type( rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_stored_info_type, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2524,7 +2755,8 @@ async def get_stored_info_type( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2551,7 +2783,7 @@ async def get_stored_info_type( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2586,10 +2818,28 @@ async def list_stored_info_types( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2612,7 +2862,8 @@ async def list_stored_info_types( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([parent]): + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." 
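The flattened `update_*` methods above pair the new config with a `FieldMask` that limits which paths are applied. A sketch with a hypothetical stored info type name:

    from google.cloud import dlp_v2
    from google.protobuf import field_mask_pb2

    client = dlp_v2.DlpServiceClient()
    updated = client.update_stored_info_type(
        name="organizations/123456/storedInfoTypes/my-dictionary",
        config=dlp_v2.StoredInfoTypeConfig(display_name="Renamed dictionary"),
        # Only the paths listed in the mask are applied to the stored config.
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )
    print(updated.name)
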
@@ -2639,7 +2890,7 @@ async def list_stored_info_types( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2695,7 +2946,8 @@ async def delete_stored_info_type( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2722,7 +2974,7 @@ async def delete_stored_info_type( ), ), default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2780,7 +3032,8 @@ async def hybrid_inspect_dlp_job( # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. - if request is not None and any([name]): + has_flattened_params = any([name]) + if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." @@ -2799,7 +3052,7 @@ async def hybrid_inspect_dlp_job( rpc = gapic_v1.method_async.wrap_method( self._client._transport.hybrid_inspect_dlp_job, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2849,7 +3102,7 @@ async def finish_dlp_job( rpc = gapic_v1.method_async.wrap_method( self._client._transport.finish_dlp_job, default_timeout=300.0, - client_info=_client_info, + client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; @@ -2865,11 +3118,11 @@ async def finish_dlp_job( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-dlp",).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("DlpServiceAsyncClient",) diff --git a/google/cloud/dlp_v2/services/dlp_service/client.py b/google/cloud/dlp_v2/services/dlp_service/client.py index b87b761f..7f0355f8 100644 --- a/google/cloud/dlp_v2/services/dlp_service/client.py +++ b/google/cloud/dlp_v2/services/dlp_service/client.py @@ -16,17 +16,19 @@ # from collections import OrderedDict +from distutils import util import os import re -from typing import Callable, Dict, Sequence, Tuple, Type, Union +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import client_options as client_options_lib # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials # type: ignore from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore @@ -35,7 +37,7 @@ from 
google.protobuf import field_mask_pb2 as field_mask  # type: ignore
 from google.protobuf import timestamp_pb2 as timestamp  # type: ignore

-from .transports.base import DlpServiceTransport
+from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO
 from .transports.grpc import DlpServiceGrpcTransport
 from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport

@@ -137,6 +139,15 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):

     from_service_account_json = from_service_account_file

+    @property
+    def transport(self) -> DlpServiceTransport:
+        """Return the transport used by the client instance.
+
+        Returns:
+            DlpServiceTransport: The transport used by the client instance.
+        """
+        return self._transport
+
     @staticmethod
     def deidentify_template_path(organization: str, deidentify_template: str,) -> str:
         """Return a fully-qualified deidentify_template string."""
@@ -153,6 +164,46 @@ def parse_deidentify_template_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}

+    @staticmethod
+    def dlp_content_path(project: str,) -> str:
+        """Return a fully-qualified dlp_content string."""
+        return "projects/{project}/dlpContent".format(project=project,)
+
+    @staticmethod
+    def parse_dlp_content_path(path: str) -> Dict[str, str]:
+        """Parse a dlp_content path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/dlpContent$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def dlp_job_path(project: str, dlp_job: str,) -> str:
+        """Return a fully-qualified dlp_job string."""
+        return "projects/{project}/dlpJobs/{dlp_job}".format(
+            project=project, dlp_job=dlp_job,
+        )
+
+    @staticmethod
+    def parse_dlp_job_path(path: str) -> Dict[str, str]:
+        """Parse a dlp_job path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/dlpJobs/(?P<dlp_job>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def finding_path(project: str, location: str, finding: str,) -> str:
+        """Return a fully-qualified finding string."""
+        return "projects/{project}/locations/{location}/findings/{finding}".format(
+            project=project, location=location, finding=finding,
+        )
+
+    @staticmethod
+    def parse_finding_path(path: str) -> Dict[str, str]:
+        """Parse a finding path into its component segments."""
+        m = re.match(
+            r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/findings/(?P<finding>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
     @staticmethod
     def inspect_template_path(organization: str, inspect_template: str,) -> str:
         """Return a fully-qualified inspect_template string."""
@@ -184,12 +235,88 @@ def parse_job_trigger_path(path: str) -> Dict[str, str]:
         )
         return m.groupdict() if m else {}

+    @staticmethod
+    def stored_info_type_path(organization: str, stored_info_type: str,) -> str:
+        """Return a fully-qualified stored_info_type string."""
+        return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(
+            organization=organization, stored_info_type=stored_info_type,
+        )
+
+    @staticmethod
+    def parse_stored_info_type_path(path: str) -> Dict[str, str]:
+        """Parse a stored_info_type path into its component segments."""
+        m = re.match(
+            r"^organizations/(?P<organization>.+?)/storedInfoTypes/(?P<stored_info_type>.+?)$",
+            path,
+        )
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str,) -> str:
+        """Return a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(
+            billing_account=billing_account,
+        )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str,) -> str:
+        """Return a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder,)
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str, str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str,) -> str:
+        """Return a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization,)
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str, str]:
+        """Parse a organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str,) -> str:
+        """Return a fully-qualified project string."""
+        return "projects/{project}".format(project=project,)
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str, str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str,) -> str:
+        """Return a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(
+            project=project, location=location,
+        )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str, str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
     def __init__(
         self,
         *,
-        credentials: credentials.Credentials = None,
-        transport: Union[str, DlpServiceTransport] = None,
-        client_options: ClientOptions = None,
+        credentials: Optional[credentials.Credentials] = None,
+        transport: Union[str, DlpServiceTransport, None] = None,
+        client_options: Optional[client_options_lib.ClientOptions] = None,
+        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
     ) -> None:
         """Instantiate the dlp service client.
@@ -202,48 +329,74 @@ def __init__(
             transport (Union[str, ~.DlpServiceTransport]): The
                 transport to use. If set to None, a transport is chosen
                 automatically.
-            client_options (ClientOptions): Custom options for the client. It
-                won't take effect if a ``transport`` instance is provided.
+            client_options (client_options_lib.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
                 (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                 environment variable can also be used to override the endpoint:
                 "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint, this is the default value for
-                the environment variable) and "auto" (auto switch to the default
-                mTLS endpoint if client SSL credentials is present). However,
-                the ``api_endpoint`` property takes precedence if provided.
-                (2) The ``client_cert_source`` property is used to provide client
-                SSL credentials for mutual TLS transport. If not provided, the
-                default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ if isinstance(client_options, dict): - client_options = ClientOptions.from_dict(client_options) + client_options = client_options_lib.from_dict(client_options) if client_options is None: - client_options = ClientOptions.ClientOptions() + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool( + util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + ) - if client_options.api_endpoint is None: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + ssl_credentials = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + import grpc # type: ignore + + cert, key = client_options.client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + is_mtls = True + else: + creds = SslCredentials() + is_mtls = creds.is_mtls + ssl_credentials = creds.ssl_credentials if is_mtls else None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") if use_mtls_env == "never": - client_options.api_endpoint = self.DEFAULT_ENDPOINT + api_endpoint = self.DEFAULT_ENDPOINT elif use_mtls_env == "always": - client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + api_endpoint = self.DEFAULT_MTLS_ENDPOINT elif use_mtls_env == "auto": - has_client_cert_source = ( - client_options.client_cert_source is not None - or mtls.has_default_client_cert_source() - ) - client_options.api_endpoint = ( - self.DEFAULT_MTLS_ENDPOINT - if has_client_cert_source - else self.DEFAULT_ENDPOINT + api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT ) else: raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always" ) # Save or instantiate the transport. 
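The endpoint selection added above, restated as a standalone function for clarity. The two endpoint constants mirror the client's defaults; the sketch raises a plain ValueError where the client raises MutualTLSChannelError:

    import os
    from typing import Optional

    DEFAULT_ENDPOINT = "dlp.googleapis.com"
    DEFAULT_MTLS_ENDPOINT = "dlp.mtls.googleapis.com"

    def resolve_endpoint(api_endpoint: Optional[str] = None, is_mtls: bool = False) -> str:
        if api_endpoint is not None:  # an explicit client option always wins
            return api_endpoint
        use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
        if use_mtls_env == "never":
            return DEFAULT_ENDPOINT
        if use_mtls_env == "always":
            return DEFAULT_MTLS_ENDPOINT
        if use_mtls_env == "auto":  # mTLS only when a client certificate is present
            return DEFAULT_MTLS_ENDPOINT if is_mtls else DEFAULT_ENDPOINT
        raise ValueError(
            "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. "
            "Accepted values: never, auto, always"
        )

    print(resolve_endpoint())  # -> dlp.googleapis.com unless the env var says otherwise
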
@@ -267,11 +420,11 @@ def __init__( self._transport = Transport( credentials=credentials, credentials_file=client_options.credentials_file, - host=client_options.api_endpoint, + host=api_endpoint, scopes=client_options.scopes, - api_mtls_endpoint=client_options.api_endpoint, - client_cert_source=client_options.client_cert_source, + ssl_channel_credentials=ssl_credentials, quota_project_id=client_options.quota_project_id, + client_info=client_info, ) def inspect_content( @@ -525,7 +678,11 @@ def list_info_types( parent (:class:`str`): The parent resource name. - - Format:locations/[LOCATION-ID] + The format of this value is as follows: + + :: + + locations/LOCATION_ID This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -597,10 +754,28 @@ def create_inspect_template( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -870,10 +1045,28 @@ def list_inspect_templates( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1031,10 +1224,28 @@ def create_deidentify_template( parent (:class:`str`): Required. Parent resource name. 
- - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1302,10 +1513,28 @@ def list_deidentify_templates( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1467,8 +1696,23 @@ def create_job_trigger( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1807,8 +2051,23 @@ def list_job_triggers( parent (:class:`str`): Required. Parent resource name. 
- - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2025,8 +2284,23 @@ def create_dlp_job( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2120,8 +2394,23 @@ def list_dlp_jobs( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2405,10 +2694,28 @@ def create_stored_info_type( parent (:class:`str`): Required. Parent resource name. 
- - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -2671,10 +2978,28 @@ def list_stored_info_types( parent (:class:`str`): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
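The `parent` strings documented above can also be produced with the path helpers this patch adds to the client. A usage sketch with illustrative identifiers:

    from google.cloud import dlp_v2

    parent = dlp_v2.DlpServiceClient.common_location_path(
        "example-project", "europe-west3",
    )
    print(parent)  # projects/example-project/locations/europe-west3

    # Each *_path builder has a parse_* counterpart that recovers the segments.
    print(dlp_v2.DlpServiceClient.parse_common_location_path(parent))
    # {'project': 'example-project', 'location': 'europe-west3'}
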
@@ -2941,11 +3266,11 @@ def finish_dlp_job( try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-dlp",).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("DlpServiceClient",) diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/google/cloud/dlp_v2/services/dlp_service/transports/base.py index 9a9978f1..aff4d1e5 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/base.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/base.py @@ -19,7 +19,7 @@ import typing import pkg_resources -from google import auth +from google import auth # type: ignore from google.api_core import exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore @@ -30,11 +30,11 @@ try: - _client_info = gapic_v1.client_info.ClientInfo( + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution("google-cloud-dlp",).version, ) except pkg_resources.DistributionNotFound: - _client_info = gapic_v1.client_info.ClientInfo() + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() class DlpServiceTransport(abc.ABC): @@ -50,6 +50,7 @@ def __init__( credentials_file: typing.Optional[str] = None, scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, quota_project_id: typing.Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, **kwargs, ) -> None: """Instantiate the transport. @@ -67,6 +68,11 @@ def __init__( scope (Optional[Sequence[str]]): A list of scopes. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. """ # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: @@ -94,9 +100,9 @@ def __init__( self._credentials = credentials # Lifted into its own function so it can be stubbed out during tests. - self._prep_wrapped_messages() + self._prep_wrapped_messages(client_info) - def _prep_wrapped_messages(self): + def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.inspect_content: gapic_v1.method.wrap_method( @@ -110,7 +116,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.redact_image: gapic_v1.method.wrap_method( self.redact_image, @@ -123,7 +129,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.deidentify_content: gapic_v1.method.wrap_method( self.deidentify_content, @@ -136,7 +142,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.reidentify_content: gapic_v1.method.wrap_method( self.reidentify_content, @@ -149,7 +155,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.list_info_types: gapic_v1.method.wrap_method( self.list_info_types, @@ -162,17 +168,17 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.create_inspect_template: gapic_v1.method.wrap_method( self.create_inspect_template, default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.update_inspect_template: gapic_v1.method.wrap_method( self.update_inspect_template, default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.get_inspect_template: gapic_v1.method.wrap_method( self.get_inspect_template, @@ -185,7 +191,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.list_inspect_templates: gapic_v1.method.wrap_method( self.list_inspect_templates, @@ -198,7 +204,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.delete_inspect_template: gapic_v1.method.wrap_method( self.delete_inspect_template, @@ -211,17 +217,17 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.create_deidentify_template: gapic_v1.method.wrap_method( self.create_deidentify_template, default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.update_deidentify_template: gapic_v1.method.wrap_method( self.update_deidentify_template, default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.get_deidentify_template: gapic_v1.method.wrap_method( self.get_deidentify_template, @@ -234,7 +240,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.list_deidentify_templates: gapic_v1.method.wrap_method( self.list_deidentify_templates, @@ -247,7 +253,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.delete_deidentify_template: gapic_v1.method.wrap_method( self.delete_deidentify_template, @@ -260,22 +266,18 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.create_job_trigger: gapic_v1.method.wrap_method( - self.create_job_trigger, - default_timeout=300.0, - client_info=_client_info, + self.create_job_trigger, default_timeout=300.0, client_info=client_info, ), self.update_job_trigger: gapic_v1.method.wrap_method( - self.update_job_trigger, - default_timeout=300.0, - client_info=_client_info, + self.update_job_trigger, 
default_timeout=300.0, client_info=client_info, ), self.hybrid_inspect_job_trigger: gapic_v1.method.wrap_method( self.hybrid_inspect_job_trigger, default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.get_job_trigger: gapic_v1.method.wrap_method( self.get_job_trigger, @@ -288,7 +290,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.list_job_triggers: gapic_v1.method.wrap_method( self.list_job_triggers, @@ -301,7 +303,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.delete_job_trigger: gapic_v1.method.wrap_method( self.delete_job_trigger, @@ -314,15 +316,15 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.activate_job_trigger: gapic_v1.method.wrap_method( self.activate_job_trigger, default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.create_dlp_job: gapic_v1.method.wrap_method( - self.create_dlp_job, default_timeout=300.0, client_info=_client_info, + self.create_dlp_job, default_timeout=300.0, client_info=client_info, ), self.list_dlp_jobs: gapic_v1.method.wrap_method( self.list_dlp_jobs, @@ -335,7 +337,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.get_dlp_job: gapic_v1.method.wrap_method( self.get_dlp_job, @@ -348,7 +350,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.delete_dlp_job: gapic_v1.method.wrap_method( self.delete_dlp_job, @@ -361,20 +363,20 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.cancel_dlp_job: gapic_v1.method.wrap_method( - self.cancel_dlp_job, default_timeout=300.0, client_info=_client_info, + self.cancel_dlp_job, default_timeout=300.0, client_info=client_info, ), self.create_stored_info_type: gapic_v1.method.wrap_method( self.create_stored_info_type, default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.update_stored_info_type: gapic_v1.method.wrap_method( self.update_stored_info_type, default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.get_stored_info_type: gapic_v1.method.wrap_method( self.get_stored_info_type, @@ -387,7 +389,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.list_stored_info_types: gapic_v1.method.wrap_method( self.list_stored_info_types, @@ -400,7 +402,7 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.delete_stored_info_type: gapic_v1.method.wrap_method( self.delete_stored_info_type, @@ -413,15 +415,15 @@ def _prep_wrapped_messages(self): ), ), default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.hybrid_inspect_dlp_job: gapic_v1.method.wrap_method( self.hybrid_inspect_dlp_job, default_timeout=300.0, - client_info=_client_info, + client_info=client_info, ), self.finish_dlp_job: gapic_v1.method.wrap_method( - self.finish_dlp_job, default_timeout=300.0, client_info=_client_info, + self.finish_dlp_job, default_timeout=300.0, client_info=client_info, ), } diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py 
b/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py index 2605ae97..0a0cb81b 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py @@ -15,20 +15,21 @@ # limitations under the License. # +import warnings from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - import grpc # type: ignore from google.cloud.dlp_v2.types import dlp from google.protobuf import empty_pb2 as empty # type: ignore -from .base import DlpServiceTransport +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO class DlpServiceGrpcTransport(DlpServiceTransport): @@ -64,7 +65,9 @@ def __init__( channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None + ssl_channel_credentials: grpc.ChannelCredentials = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -83,16 +86,23 @@ def __init__( ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -100,6 +110,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -107,7 +119,13 @@ def __init__( # If a channel was explicitly provided, set it. 
self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint @@ -138,6 +156,24 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) self._stubs = {} # type: Dict[str, Callable] @@ -148,6 +184,7 @@ def __init__( credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + client_info=client_info, ) @classmethod @@ -158,7 +195,7 @@ def create_channel( credentials_file: str = None, scopes: Optional[Sequence[str]] = None, quota_project_id: Optional[str] = None, - **kwargs + **kwargs, ) -> grpc.Channel: """Create and return a gRPC channel object. Args: @@ -192,24 +229,13 @@ def create_channel( credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, - **kwargs + **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. + """Return the channel designed to connect to this service. """ - # Sanity check: Only create a new channel if we do not already - # have one. - if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - - # Return the channel from cache. return self._grpc_channel @property diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py index e1ab0937..27b0f50c 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py @@ -15,9 +15,12 @@ # limitations under the License. # +import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple +from google.api_core import gapic_v1 # type: ignore from google.api_core import grpc_helpers_async # type: ignore +from google import auth # type: ignore from google.auth import credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore @@ -27,7 +30,7 @@ from google.cloud.dlp_v2.types import dlp from google.protobuf import empty_pb2 as empty # type: ignore -from .base import DlpServiceTransport +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO from .grpc import DlpServiceGrpcTransport @@ -106,7 +109,9 @@ def __init__( channel: aio.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. @@ -126,16 +131,23 @@ def __init__( are passed to :func:`google.auth.default`. 
channel (Optional[aio.Channel]): A ``Channel`` instance through which to make calls. - api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If - provided, it overrides the ``host`` argument and tries to create + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A - callback to provide client SSL certificate bytes and private key - bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` - is None. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -143,6 +155,8 @@ def __init__( google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` and ``credentials_file`` are passed. """ + self._ssl_channel_credentials = ssl_channel_credentials + if channel: # Sanity check: Ensure that channel and credentials are not both # provided. @@ -150,13 +164,24 @@ def __init__( # If a channel was explicitly provided, set it. self._grpc_channel = channel + self._ssl_channel_credentials = None elif api_mtls_endpoint: + warnings.warn( + "api_mtls_endpoint and client_cert_source are deprecated", + DeprecationWarning, + ) + host = ( api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" ) + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: @@ -176,6 +201,24 @@ def __init__( scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, ) + self._ssl_channel_credentials = ssl_credentials + else: + host = host if ":" in host else host + ":443" + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_channel_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) # Run the base constructor. super().__init__( @@ -184,6 +227,7 @@ def __init__( credentials_file=credentials_file, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, + client_info=client_info, ) self._stubs = {} @@ -195,13 +239,6 @@ def grpc_channel(self) -> aio.Channel: This property caches on the instance; repeated calls return the same channel. """ - # Sanity check: Only create a new channel if we do not already - # have one. 
- if not hasattr(self, "_grpc_channel"): - self._grpc_channel = self.create_channel( - self._host, credentials=self._credentials, - ) - # Return the channel from cache. return self._grpc_channel diff --git a/google/cloud/dlp_v2/types/dlp.py b/google/cloud/dlp_v2/types/dlp.py index 521dd1e6..69fec852 100644 --- a/google/cloud/dlp_v2/types/dlp.py +++ b/google/cloud/dlp_v2/types/dlp.py @@ -276,7 +276,7 @@ class ExclusionRule(proto.Message): ) exclude_info_types = proto.Field( - proto.MESSAGE, number=3, oneof="type", message=ExcludeInfoTypes, + proto.MESSAGE, number=3, oneof="type", message="ExcludeInfoTypes", ) matching_type = proto.Field(proto.ENUM, number=4, enum="MatchingType",) @@ -301,7 +301,7 @@ class InspectionRule(proto.Message): ) exclusion_rule = proto.Field( - proto.MESSAGE, number=2, oneof="type", message=ExclusionRule, + proto.MESSAGE, number=2, oneof="type", message="ExclusionRule", ) @@ -321,7 +321,7 @@ class InspectionRuleSet(proto.Message): info_types = proto.RepeatedField(proto.MESSAGE, number=1, message=storage.InfoType,) - rules = proto.RepeatedField(proto.MESSAGE, number=2, message=InspectionRule,) + rules = proto.RepeatedField(proto.MESSAGE, number=2, message="InspectionRule",) class InspectConfig(proto.Message): @@ -438,14 +438,16 @@ class InfoTypeLimit(proto.Message): content_options = proto.RepeatedField(proto.ENUM, number=8, enum="ContentOption",) - rule_set = proto.RepeatedField(proto.MESSAGE, number=10, message=InspectionRuleSet,) + rule_set = proto.RepeatedField( + proto.MESSAGE, number=10, message="InspectionRuleSet", + ) class ByteContentItem(proto.Message): r"""Container for bytes to inspect or redact. Attributes: - type (~.dlp.ByteContentItem.BytesType): + type_ (~.dlp.ByteContentItem.BytesType): The type of data stored in the bytes string. Default will be TEXT_UTF8. data (bytes): @@ -467,7 +469,7 @@ class BytesType(proto.Enum): CSV = 12 TSV = 13 - type = proto.Field(proto.ENUM, number=1, enum=BytesType,) + type_ = proto.Field(proto.ENUM, number=1, enum=BytesType,) data = proto.Field(proto.BYTES, number=2) @@ -492,7 +494,7 @@ class ContentItem(proto.Message): table = proto.Field(proto.MESSAGE, number=4, oneof="data_item", message="Table",) byte_item = proto.Field( - proto.MESSAGE, number=5, oneof="data_item", message=ByteContentItem, + proto.MESSAGE, number=5, oneof="data_item", message="ByteContentItem", ) @@ -735,13 +737,13 @@ class MetadataLocation(proto.Message): r"""Metadata Location Attributes: - type (~.dlp.MetadataType): + type_ (~.dlp.MetadataType): Type of metadata containing the finding. storage_label (~.dlp.StorageMetadataLabel): Storage metadata. """ - type = proto.Field(proto.ENUM, number=1, enum="MetadataType",) + type_ = proto.Field(proto.ENUM, number=1, enum="MetadataType",) storage_label = proto.Field( proto.MESSAGE, number=3, oneof="label", message="StorageMetadataLabel", @@ -814,7 +816,7 @@ class Container(proto.Message): record. Attributes: - type (str): + type_ (str): Container type, for example BigQuery or Google Cloud Storage. project_id (str): @@ -853,7 +855,7 @@ class Container(proto.Message): ("generation" for Google Cloud Storage). """ - type = proto.Field(proto.STRING, number=1) + type_ = proto.Field(proto.STRING, number=1) project_id = proto.Field(proto.STRING, number=2) @@ -930,10 +932,24 @@ class RedactImageRequest(proto.Message): Attributes: parent (str): - The parent resource name. + Parent resource name. 
+ + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID - - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 location_id (str): Deprecated. This field has no effect. inspect_config (~.dlp.InspectConfig): @@ -981,7 +997,7 @@ class ImageRedactionConfig(proto.Message): location_id = proto.Field(proto.STRING, number=8) - inspect_config = proto.Field(proto.MESSAGE, number=2, message=InspectConfig,) + inspect_config = proto.Field(proto.MESSAGE, number=2, message="InspectConfig",) image_redaction_configs = proto.RepeatedField( proto.MESSAGE, number=5, message=ImageRedactionConfig, @@ -989,7 +1005,7 @@ class ImageRedactionConfig(proto.Message): include_findings = proto.Field(proto.BOOL, number=6) - byte_item = proto.Field(proto.MESSAGE, number=7, message=ByteContentItem,) + byte_item = proto.Field(proto.MESSAGE, number=7, message="ByteContentItem",) class Color(proto.Message): @@ -1034,7 +1050,7 @@ class RedactImageResponse(proto.Message): extracted_text = proto.Field(proto.STRING, number=2) - inspect_result = proto.Field(proto.MESSAGE, number=3, message=InspectResult,) + inspect_result = proto.Field(proto.MESSAGE, number=3, message="InspectResult",) class DeidentifyContentRequest(proto.Message): @@ -1044,8 +1060,22 @@ class DeidentifyContentRequest(proto.Message): parent (str): Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 deidentify_config (~.dlp.DeidentifyConfig): Configuration for the de-identification of the content item. Items specified here will override the template referenced @@ -1081,9 +1111,9 @@ class DeidentifyContentRequest(proto.Message): proto.MESSAGE, number=2, message="DeidentifyConfig", ) - inspect_config = proto.Field(proto.MESSAGE, number=3, message=InspectConfig,) + inspect_config = proto.Field(proto.MESSAGE, number=3, message="InspectConfig",) - item = proto.Field(proto.MESSAGE, number=4, message=ContentItem,) + item = proto.Field(proto.MESSAGE, number=4, message="ContentItem",) inspect_template_name = proto.Field(proto.STRING, number=5) @@ -1102,7 +1132,7 @@ class DeidentifyContentResponse(proto.Message): An overview of the changes that were made on the ``item``. 
""" - item = proto.Field(proto.MESSAGE, number=1, message=ContentItem,) + item = proto.Field(proto.MESSAGE, number=1, message="ContentItem",) overview = proto.Field(proto.MESSAGE, number=2, message="TransformationOverview",) @@ -1112,10 +1142,24 @@ class ReidentifyContentRequest(proto.Message): Attributes: parent (str): - Required. The parent resource name. + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: - - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + :: + + parent=projects/example-project/locations/europe-west3 reidentify_config (~.dlp.DeidentifyConfig): Configuration for the re-identification of the content item. This field shares the same proto message type that is used @@ -1145,10 +1189,12 @@ class ReidentifyContentRequest(proto.Message): Template to use. References an instance of ``DeidentifyTemplate``. Any configuration directly specified in ``reidentify_config`` or ``inspect_config`` will override - those set in the template. Singular fields that are set in - this request will replace their corresponding fields in the - template. Repeated fields are appended. Singular - sub-messages and groups are recursively merged. + those set in the template. The ``DeidentifyTemplate`` used + must include only reversible transformations. Singular + fields that are set in this request will replace their + corresponding fields in the template. Repeated fields are + appended. Singular sub-messages and groups are recursively + merged. location_id (str): Deprecated. This field has no effect. """ @@ -1159,9 +1205,9 @@ class ReidentifyContentRequest(proto.Message): proto.MESSAGE, number=2, message="DeidentifyConfig", ) - inspect_config = proto.Field(proto.MESSAGE, number=3, message=InspectConfig,) + inspect_config = proto.Field(proto.MESSAGE, number=3, message="InspectConfig",) - item = proto.Field(proto.MESSAGE, number=4, message=ContentItem,) + item = proto.Field(proto.MESSAGE, number=4, message="ContentItem",) inspect_template_name = proto.Field(proto.STRING, number=5) @@ -1180,7 +1226,7 @@ class ReidentifyContentResponse(proto.Message): An overview of the changes that were made to the ``item``. """ - item = proto.Field(proto.MESSAGE, number=1, message=ContentItem,) + item = proto.Field(proto.MESSAGE, number=1, message="ContentItem",) overview = proto.Field(proto.MESSAGE, number=2, message="TransformationOverview",) @@ -1193,8 +1239,22 @@ class InspectContentRequest(proto.Message): parent (str): Parent resource name. 
- - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 inspect_config (~.dlp.InspectConfig): Configuration for the inspector. What specified here will override the template referenced by the @@ -1214,9 +1274,9 @@ class InspectContentRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - inspect_config = proto.Field(proto.MESSAGE, number=2, message=InspectConfig,) + inspect_config = proto.Field(proto.MESSAGE, number=2, message="InspectConfig",) - item = proto.Field(proto.MESSAGE, number=3, message=ContentItem,) + item = proto.Field(proto.MESSAGE, number=3, message="ContentItem",) inspect_template_name = proto.Field(proto.STRING, number=4) @@ -1231,7 +1291,7 @@ class InspectContentResponse(proto.Message): The findings. """ - result = proto.Field(proto.MESSAGE, number=1, message=InspectResult,) + result = proto.Field(proto.MESSAGE, number=1, message="InspectResult",) class OutputStorageConfig(proto.Message): @@ -1355,7 +1415,7 @@ class Result(proto.Message): total_estimated_bytes = proto.Field(proto.INT64, number=2) info_type_stats = proto.RepeatedField( - proto.MESSAGE, number=3, message=InfoTypeStats, + proto.MESSAGE, number=3, message="InfoTypeStats", ) hybrid_stats = proto.Field( @@ -1428,7 +1488,11 @@ class ListInfoTypesRequest(proto.Message): parent (str): The parent resource name. - - Format:locations/[LOCATION-ID] + The format of this value is as follows: + + :: + + locations/LOCATION_ID language_code (str): BCP-47 language code for localized infoType friendly names. If omitted, or if localized @@ -1459,7 +1523,7 @@ class ListInfoTypesResponse(proto.Message): """ info_types = proto.RepeatedField( - proto.MESSAGE, number=1, message=InfoTypeDescription, + proto.MESSAGE, number=1, message="InfoTypeDescription", ) @@ -1814,12 +1878,12 @@ class DeltaPresenceEstimationConfig(proto.Message): in exactly one field of one auxiliary table. """ - quasi_ids = proto.RepeatedField(proto.MESSAGE, number=1, message=QuasiId,) + quasi_ids = proto.RepeatedField(proto.MESSAGE, number=1, message="QuasiId",) region_code = proto.Field(proto.STRING, number=2) auxiliary_tables = proto.RepeatedField( - proto.MESSAGE, number=3, message=StatisticalTable, + proto.MESSAGE, number=3, message="StatisticalTable", ) numerical_stats_config = proto.Field( @@ -1867,6 +1931,8 @@ class AnalyzeDataSourceRiskDetails(proto.Message): K-map result delta_presence_estimation_result (~.dlp.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult): Delta-presence result + requested_options (~.dlp.AnalyzeDataSourceRiskDetails.RequestedRiskAnalysisOptions): + The configuration used for this job. """ class NumericalStatsResult(proto.Message): @@ -2256,8 +2322,20 @@ class DeltaPresenceEstimationHistogramBucket(proto.Message): message="AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket", ) + class RequestedRiskAnalysisOptions(proto.Message): + r"""Risk analysis options. 
+ + Attributes: + job_config (~.dlp.RiskAnalysisJobConfig): + The job config for the risk job. + """ + + job_config = proto.Field( + proto.MESSAGE, number=1, message="RiskAnalysisJobConfig", + ) + requested_privacy_metric = proto.Field( - proto.MESSAGE, number=1, message=PrivacyMetric, + proto.MESSAGE, number=1, message="PrivacyMetric", ) requested_source_table = proto.Field( @@ -2288,6 +2366,10 @@ class DeltaPresenceEstimationHistogramBucket(proto.Message): proto.MESSAGE, number=9, oneof="result", message=DeltaPresenceEstimationResult, ) + requested_options = proto.Field( + proto.MESSAGE, number=10, message=RequestedRiskAnalysisOptions, + ) + class ValueFrequency(proto.Message): r"""A value of a field, including its frequency. @@ -2695,7 +2777,7 @@ class ReplaceValueConfig(proto.Message): Value to replace it with. """ - new_value = proto.Field(proto.MESSAGE, number=1, message=Value,) + new_value = proto.Field(proto.MESSAGE, number=1, message="Value",) class ReplaceWithInfoTypeConfig(proto.Message): @@ -2782,7 +2864,7 @@ class CharacterMaskConfig(proto.Message): reverse_order = proto.Field(proto.BOOL, number=3) characters_to_ignore = proto.RepeatedField( - proto.MESSAGE, number=4, message=CharsToIgnore, + proto.MESSAGE, number=4, message="CharsToIgnore", ) @@ -2826,9 +2908,9 @@ class FixedSizeBucketingConfig(proto.Message): decimals works. """ - lower_bound = proto.Field(proto.MESSAGE, number=1, message=Value,) + lower_bound = proto.Field(proto.MESSAGE, number=1, message="Value",) - upper_bound = proto.Field(proto.MESSAGE, number=2, message=Value,) + upper_bound = proto.Field(proto.MESSAGE, number=2, message="Value",) bucket_size = proto.Field(proto.DOUBLE, number=3) @@ -2855,21 +2937,21 @@ class Bucket(proto.Message): values. Attributes: - min (~.dlp.Value): + min_ (~.dlp.Value): Lower bound of the range, inclusive. Type should be the same as max if used. - max (~.dlp.Value): + max_ (~.dlp.Value): Upper bound of the range, exclusive; type must match min. replacement_value (~.dlp.Value): Required. Replacement value for this bucket. """ - min = proto.Field(proto.MESSAGE, number=1, message=Value,) + min_ = proto.Field(proto.MESSAGE, number=1, message="Value",) - max = proto.Field(proto.MESSAGE, number=2, message=Value,) + max_ = proto.Field(proto.MESSAGE, number=2, message="Value",) - replacement_value = proto.Field(proto.MESSAGE, number=3, message=Value,) + replacement_value = proto.Field(proto.MESSAGE, number=3, message="Value",) buckets = proto.RepeatedField(proto.MESSAGE, number=1, message=Bucket,) @@ -2928,7 +3010,10 @@ class CryptoReplaceFfxFpeConfig(proto.Message): before/after encryption/decryption. Each character listed must appear only once. Number of characters must be in the range [2, 95]. This must be encoded as ASCII. The order of - characters does not matter. + characters does not matter. The full list of allowed + characters is: + 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz + ~`!@#$%^&*()_-+={[}]|:;"'<,>.?/ radix (int): The native way to select the alphabet. Must be in the range [2, 95]. 
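
The ``min_``/``max_`` renames above ripple into any code that builds bucketing
transformations by keyword. A minimal sketch of the new spelling, assuming the
buckets are constructed directly from ``google.cloud.dlp_v2`` types (the
``0``/``18``/``"low"`` bounds and label are purely illustrative)::

    from google.cloud import dlp_v2

    # `min` and `max` shadow Python builtins, so the generated message now
    # exposes them as `min_` and `max_`.
    bucket = dlp_v2.types.BucketingConfig.Bucket(
        min_=dlp_v2.types.Value(integer_value=0),
        max_=dlp_v2.types.Value(integer_value=18),
        replacement_value=dlp_v2.types.Value(string_value="low"),
    )
    bucketing = dlp_v2.types.BucketingConfig(buckets=[bucket])
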
@@ -3108,7 +3193,7 @@ class DateShiftConfig(proto.Message): context = proto.Field(proto.MESSAGE, number=3, message=storage.FieldId,) crypto_key = proto.Field( - proto.MESSAGE, number=4, oneof="method", message=CryptoKey, + proto.MESSAGE, number=4, oneof="method", message="CryptoKey", ) @@ -3145,7 +3230,7 @@ class InfoTypeTransformation(proto.Message): ) primitive_transformation = proto.Field( - proto.MESSAGE, number=2, message=PrimitiveTransformation, + proto.MESSAGE, number=2, message="PrimitiveTransformation", ) transformations = proto.RepeatedField( @@ -3188,14 +3273,14 @@ class FieldTransformation(proto.Message): proto.MESSAGE, number=4, oneof="transformation", - message=PrimitiveTransformation, + message="PrimitiveTransformation", ) info_type_transformations = proto.Field( proto.MESSAGE, number=5, oneof="transformation", - message=InfoTypeTransformations, + message="InfoTypeTransformations", ) @@ -3214,7 +3299,7 @@ class RecordTransformations(proto.Message): """ field_transformations = proto.RepeatedField( - proto.MESSAGE, number=1, message=FieldTransformation, + proto.MESSAGE, number=1, message="FieldTransformation", ) record_suppressions = proto.RepeatedField( @@ -3282,7 +3367,7 @@ class Condition(proto.Message): operator = proto.Field(proto.ENUM, number=3, enum="RelationalOperator",) - value = proto.Field(proto.MESSAGE, number=4, message=Value,) + value = proto.Field(proto.MESSAGE, number=4, message="Value",) class Conditions(proto.Message): r"""A collection of conditions. @@ -3405,14 +3490,14 @@ class SummaryResult(proto.Message): field = proto.Field(proto.MESSAGE, number=2, message=storage.FieldId,) transformation = proto.Field( - proto.MESSAGE, number=3, message=PrimitiveTransformation, + proto.MESSAGE, number=3, message="PrimitiveTransformation", ) field_transformations = proto.RepeatedField( - proto.MESSAGE, number=5, message=FieldTransformation, + proto.MESSAGE, number=5, message="FieldTransformation", ) - record_suppress = proto.Field(proto.MESSAGE, number=6, message=RecordSuppression,) + record_suppress = proto.Field(proto.MESSAGE, number=6, message="RecordSuppression",) results = proto.RepeatedField(proto.MESSAGE, number=4, message=SummaryResult,) @@ -3485,7 +3570,7 @@ class InspectTemplate(proto.Message): update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - inspect_config = proto.Field(proto.MESSAGE, number=6, message=InspectConfig,) + inspect_config = proto.Field(proto.MESSAGE, number=6, message="InspectConfig",) class DeidentifyTemplate(proto.Message): @@ -3525,7 +3610,9 @@ class DeidentifyTemplate(proto.Message): update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) - deidentify_config = proto.Field(proto.MESSAGE, number=6, message=DeidentifyConfig,) + deidentify_config = proto.Field( + proto.MESSAGE, number=6, message="DeidentifyConfig", + ) class Error(proto.Message): @@ -3620,10 +3707,12 @@ class Trigger(proto.Message): """ schedule = proto.Field( - proto.MESSAGE, number=1, oneof="trigger", message=Schedule, + proto.MESSAGE, number=1, oneof="trigger", message="Schedule", ) - manual = proto.Field(proto.MESSAGE, number=2, oneof="trigger", message=Manual,) + manual = proto.Field( + proto.MESSAGE, number=2, oneof="trigger", message="Manual", + ) name = proto.Field(proto.STRING, number=1) @@ -3637,7 +3726,7 @@ class Trigger(proto.Message): triggers = proto.RepeatedField(proto.MESSAGE, number=5, message=Trigger,) - errors = proto.RepeatedField(proto.MESSAGE, number=6, message=Error,) + errors = 
proto.RepeatedField(proto.MESSAGE, number=6, message="Error",) create_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) @@ -3683,7 +3772,7 @@ class SaveFindings(proto.Message): """ output_config = proto.Field( - proto.MESSAGE, number=1, message=OutputStorageConfig, + proto.MESSAGE, number=1, message="OutputStorageConfig", ) class PublishToPubSub(proto.Message): @@ -3780,16 +3869,33 @@ class CreateInspectTemplateRequest(proto.Message): parent (str): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 inspect_template (~.dlp.InspectTemplate): Required. The InspectTemplate to create. template_id (str): The template id can contain uppercase and lowercase letters, numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\\d-_]+``. The maximum length is 100 + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 characters. Can be empty to allow the system to generate one. location_id (str): @@ -3798,7 +3904,7 @@ class CreateInspectTemplateRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - inspect_template = proto.Field(proto.MESSAGE, number=2, message=InspectTemplate,) + inspect_template = proto.Field(proto.MESSAGE, number=2, message="InspectTemplate",) template_id = proto.Field(proto.STRING, number=3) @@ -3822,7 +3928,7 @@ class UpdateInspectTemplateRequest(proto.Message): name = proto.Field(proto.STRING, number=1) - inspect_template = proto.Field(proto.MESSAGE, number=2, message=InspectTemplate,) + inspect_template = proto.Field(proto.MESSAGE, number=2, message="InspectTemplate",) update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) @@ -3848,10 +3954,27 @@ class ListInspectTemplatesRequest(proto.Message): parent (str): Required. Parent resource name. 
- - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 page_token (str): Page token to continue retrieval. Comes from previous call to ``ListInspectTemplates``. @@ -3907,7 +4030,7 @@ def raw_page(self): return self inspect_templates = proto.RepeatedField( - proto.MESSAGE, number=1, message=InspectTemplate, + proto.MESSAGE, number=1, message="InspectTemplate", ) next_page_token = proto.Field(proto.STRING, number=2) @@ -3934,14 +4057,28 @@ class CreateJobTriggerRequest(proto.Message): parent (str): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 job_trigger (~.dlp.JobTrigger): Required. The JobTrigger to create. trigger_id (str): The trigger id can contain uppercase and lowercase letters, numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\\d-_]+``. The maximum length is 100 + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 characters. Can be empty to allow the system to generate one. location_id (str): @@ -3950,7 +4087,7 @@ class CreateJobTriggerRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) - job_trigger = proto.Field(proto.MESSAGE, number=2, message=JobTrigger,) + job_trigger = proto.Field(proto.MESSAGE, number=2, message="JobTrigger",) trigger_id = proto.Field(proto.STRING, number=3) @@ -3985,7 +4122,7 @@ class UpdateJobTriggerRequest(proto.Message): name = proto.Field(proto.STRING, number=1) - job_trigger = proto.Field(proto.MESSAGE, number=2, message=JobTrigger,) + job_trigger = proto.Field(proto.MESSAGE, number=2, message="JobTrigger",) update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) @@ -4012,8 +4149,22 @@ class CreateDlpJobRequest(proto.Message): parent (str): Required. Parent resource name. 
- - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 inspect_job (~.dlp.InspectJobConfig): Set to control what and how to inspect. risk_job (~.dlp.RiskAnalysisJobConfig): @@ -4021,7 +4172,7 @@ class CreateDlpJobRequest(proto.Message): job_id (str): The job id can contain uppercase and lowercase letters, numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\\d-_]+``. The maximum length is 100 + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 characters. Can be empty to allow the system to generate one. location_id (str): @@ -4035,7 +4186,7 @@ class CreateDlpJobRequest(proto.Message): ) risk_job = proto.Field( - proto.MESSAGE, number=3, oneof="job", message=RiskAnalysisJobConfig, + proto.MESSAGE, number=3, oneof="job", message="RiskAnalysisJobConfig", ) job_id = proto.Field(proto.STRING, number=4) @@ -4050,8 +4201,22 @@ class ListJobTriggersRequest(proto.Message): parent (str): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 page_token (str): Page token to continue retrieval. Comes from previous call to ListJobTriggers. ``order_by`` field must not change for @@ -4149,7 +4314,7 @@ class ListJobTriggersResponse(proto.Message): def raw_page(self): return self - job_triggers = proto.RepeatedField(proto.MESSAGE, number=1, message=JobTrigger,) + job_triggers = proto.RepeatedField(proto.MESSAGE, number=1, message="JobTrigger",) next_page_token = proto.Field(proto.STRING, number=2) @@ -4188,11 +4353,11 @@ class InspectJobConfig(proto.Message): proto.MESSAGE, number=1, message=storage.StorageConfig, ) - inspect_config = proto.Field(proto.MESSAGE, number=2, message=InspectConfig,) + inspect_config = proto.Field(proto.MESSAGE, number=2, message="InspectConfig",) inspect_template_name = proto.Field(proto.STRING, number=3) - actions = proto.RepeatedField(proto.MESSAGE, number=4, message=Action,) + actions = proto.RepeatedField(proto.MESSAGE, number=4, message="Action",) class DlpJob(proto.Message): @@ -4201,7 +4366,7 @@ class DlpJob(proto.Message): Attributes: name (str): The server-assigned name. - type (~.dlp.DlpJobType): + type_ (~.dlp.DlpJobType): The type of job. state (~.dlp.DlpJob.JobState): State of a job. 
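
The ``type_`` rename is visible on the read path as well, and it pairs with the
new ``requested_options`` field added to ``AnalyzeDataSourceRiskDetails`` earlier
in this diff. A minimal sketch, assuming a default client and an existing job
resource name (``job_name`` is a placeholder)::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    job = client.get_dlp_job(request={"name": job_name})

    # `type` collided with the builtin, so the field is now `type_`.
    if job.type_ == dlp_v2.DlpJobType.RISK_ANALYSIS_JOB:
        # Risk analysis jobs now carry the config they were created with.
        print(job.risk_details.requested_options.job_config)
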
@@ -4235,16 +4400,19 @@ class JobState(proto.Enum): name = proto.Field(proto.STRING, number=1) - type = proto.Field(proto.ENUM, number=2, enum="DlpJobType",) + type_ = proto.Field(proto.ENUM, number=2, enum="DlpJobType",) state = proto.Field(proto.ENUM, number=3, enum=JobState,) risk_details = proto.Field( - proto.MESSAGE, number=4, oneof="details", message=AnalyzeDataSourceRiskDetails, + proto.MESSAGE, + number=4, + oneof="details", + message="AnalyzeDataSourceRiskDetails", ) inspect_details = proto.Field( - proto.MESSAGE, number=5, oneof="details", message=InspectDataSourceDetails, + proto.MESSAGE, number=5, oneof="details", message="InspectDataSourceDetails", ) create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) @@ -4255,7 +4423,7 @@ class JobState(proto.Enum): job_trigger_name = proto.Field(proto.STRING, number=10) - errors = proto.RepeatedField(proto.MESSAGE, number=11, message=Error,) + errors = proto.RepeatedField(proto.MESSAGE, number=11, message="Error",) class GetDlpJobRequest(proto.Message): @@ -4276,8 +4444,22 @@ class ListDlpJobsRequest(proto.Message): parent (str): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 filter (str): Allows filtering. @@ -4323,7 +4505,7 @@ class ListDlpJobsRequest(proto.Message): The standard list page size. page_token (str): The standard list page token. - type (~.dlp.DlpJobType): + type_ (~.dlp.DlpJobType): The type of job. Defaults to ``DlpJobType.INSPECT`` order_by (str): Comma separated list of fields to order by, followed by @@ -4351,7 +4533,7 @@ class ListDlpJobsRequest(proto.Message): page_token = proto.Field(proto.STRING, number=3) - type = proto.Field(proto.ENUM, number=5, enum="DlpJobType",) + type_ = proto.Field(proto.ENUM, number=5, enum="DlpJobType",) order_by = proto.Field(proto.STRING, number=6) @@ -4373,7 +4555,7 @@ class ListDlpJobsResponse(proto.Message): def raw_page(self): return self - jobs = proto.RepeatedField(proto.MESSAGE, number=1, message=DlpJob,) + jobs = proto.RepeatedField(proto.MESSAGE, number=1, message="DlpJob",) next_page_token = proto.Field(proto.STRING, number=2) @@ -4421,16 +4603,33 @@ class CreateDeidentifyTemplateRequest(proto.Message): parent (str): Required. Parent resource name. 
- - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 deidentify_template (~.dlp.DeidentifyTemplate): Required. The DeidentifyTemplate to create. template_id (str): The template id can contain uppercase and lowercase letters, numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\\d-_]+``. The maximum length is 100 + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 characters. Can be empty to allow the system to generate one. location_id (str): @@ -4440,7 +4639,7 @@ class CreateDeidentifyTemplateRequest(proto.Message): parent = proto.Field(proto.STRING, number=1) deidentify_template = proto.Field( - proto.MESSAGE, number=2, message=DeidentifyTemplate, + proto.MESSAGE, number=2, message="DeidentifyTemplate", ) template_id = proto.Field(proto.STRING, number=3) @@ -4466,7 +4665,7 @@ class UpdateDeidentifyTemplateRequest(proto.Message): name = proto.Field(proto.STRING, number=1) deidentify_template = proto.Field( - proto.MESSAGE, number=2, message=DeidentifyTemplate, + proto.MESSAGE, number=2, message="DeidentifyTemplate", ) update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) @@ -4493,10 +4692,27 @@ class ListDeidentifyTemplatesRequest(proto.Message): parent (str): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 page_token (str): Page token to continue retrieval. Comes from previous call to ``ListDeidentifyTemplates``. 
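
Since a ``parent`` with no location now defaults to the ``global`` location,
template creation calls are worth spelling out. A minimal sketch of a
location-qualified ``create_deidentify_template`` request, assuming a default
client; the project id, location, and the trivial replace-with-infoType config
are illustrative only::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    response = client.create_deidentify_template(
        request={
            # Location-qualified parent, as documented above.
            "parent": "projects/example-project/locations/europe-west3",
            "deidentify_template": {
                "deidentify_config": {
                    "info_type_transformations": {
                        "transformations": [
                            {
                                "primitive_transformation": {
                                    "replace_with_info_type_config": {}
                                }
                            }
                        ]
                    }
                }
            },
        }
    )
    print(response.name)
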
@@ -4552,7 +4768,7 @@ def raw_page(self): return self deidentify_templates = proto.RepeatedField( - proto.MESSAGE, number=1, message=DeidentifyTemplate, + proto.MESSAGE, number=1, message="DeidentifyTemplate", ) next_page_token = proto.Field(proto.STRING, number=2) @@ -4648,7 +4864,7 @@ class StoredInfoTypeConfig(proto.Message): description = proto.Field(proto.STRING, number=2) large_custom_dictionary = proto.Field( - proto.MESSAGE, number=3, oneof="type", message=LargeCustomDictionaryConfig, + proto.MESSAGE, number=3, oneof="type", message="LargeCustomDictionaryConfig", ) dictionary = proto.Field( @@ -4673,7 +4889,7 @@ class StoredInfoTypeStats(proto.Message): """ large_custom_dictionary = proto.Field( - proto.MESSAGE, number=1, oneof="type", message=LargeCustomDictionaryStats, + proto.MESSAGE, number=1, oneof="type", message="LargeCustomDictionaryStats", ) @@ -4713,15 +4929,15 @@ class StoredInfoTypeVersion(proto.Message): Statistics about this storedInfoType version. """ - config = proto.Field(proto.MESSAGE, number=1, message=StoredInfoTypeConfig,) + config = proto.Field(proto.MESSAGE, number=1, message="StoredInfoTypeConfig",) create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) state = proto.Field(proto.ENUM, number=3, enum="StoredInfoTypeState",) - errors = proto.RepeatedField(proto.MESSAGE, number=4, message=Error,) + errors = proto.RepeatedField(proto.MESSAGE, number=4, message="Error",) - stats = proto.Field(proto.MESSAGE, number=5, message=StoredInfoTypeStats,) + stats = proto.Field(proto.MESSAGE, number=5, message="StoredInfoTypeStats",) class StoredInfoType(proto.Message): @@ -4741,11 +4957,11 @@ class StoredInfoType(proto.Message): name = proto.Field(proto.STRING, number=1) current_version = proto.Field( - proto.MESSAGE, number=2, message=StoredInfoTypeVersion, + proto.MESSAGE, number=2, message="StoredInfoTypeVersion", ) pending_versions = proto.RepeatedField( - proto.MESSAGE, number=3, message=StoredInfoTypeVersion, + proto.MESSAGE, number=3, message="StoredInfoTypeVersion", ) @@ -4756,26 +4972,43 @@ class CreateStoredInfoTypeRequest(proto.Message): parent (str): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 config (~.dlp.StoredInfoTypeConfig): Required. Configuration of the storedInfoType to create. stored_info_type_id (str): The storedInfoType ID can contain uppercase and lowercase letters, numbers, and hyphens; that is, it must match the - regular expression: ``[a-zA-Z\\d-_]+``. The maximum length - is 100 characters. Can be empty to allow the system to - generate one. 
+ regular expression: ``[a-zA-Z\d-_]+``. The maximum length is + 100 characters. Can be empty to allow the system to generate + one. location_id (str): Deprecated. This field has no effect. """ parent = proto.Field(proto.STRING, number=1) - config = proto.Field(proto.MESSAGE, number=2, message=StoredInfoTypeConfig,) + config = proto.Field(proto.MESSAGE, number=2, message="StoredInfoTypeConfig",) stored_info_type_id = proto.Field(proto.STRING, number=3) @@ -4802,7 +5035,7 @@ class UpdateStoredInfoTypeRequest(proto.Message): name = proto.Field(proto.STRING, number=1) - config = proto.Field(proto.MESSAGE, number=2, message=StoredInfoTypeConfig,) + config = proto.Field(proto.MESSAGE, number=2, message="StoredInfoTypeConfig",) update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) @@ -4828,10 +5061,27 @@ class ListStoredInfoTypesRequest(proto.Message): parent (str): Required. Parent resource name. - - Format:projects/[PROJECT-ID] - - Format:organizations/[ORGANIZATION-ID] - - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] - - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 page_token (str): Page token to continue retrieval. Comes from previous call to ``ListStoredInfoTypes``. @@ -4887,7 +5137,7 @@ def raw_page(self): return self stored_info_types = proto.RepeatedField( - proto.MESSAGE, number=1, message=StoredInfoType, + proto.MESSAGE, number=1, message="StoredInfoType", ) next_page_token = proto.Field(proto.STRING, number=2) @@ -4955,7 +5205,7 @@ class HybridContentItem(proto.Message): to each finding. """ - item = proto.Field(proto.MESSAGE, number=1, message=ContentItem,) + item = proto.Field(proto.MESSAGE, number=1, message="ContentItem",) finding_details = proto.Field( proto.MESSAGE, number=2, message="HybridFindingDetails", @@ -5011,7 +5261,7 @@ class HybridFindingDetails(proto.Message): - ``"pipeline" : "etl"`` """ - container_details = proto.Field(proto.MESSAGE, number=1, message=Container,) + container_details = proto.Field(proto.MESSAGE, number=1, message="Container",) file_offset = proto.Field(proto.INT64, number=2) diff --git a/google/cloud/dlp_v2/types/storage.py b/google/cloud/dlp_v2/types/storage.py index c7a1e455..c4cd5ac4 100644 --- a/google/cloud/dlp_v2/types/storage.py +++ b/google/cloud/dlp_v2/types/storage.py @@ -88,8 +88,9 @@ class InfoType(proto.Message): Name of the information type. Either a name of your choosing when creating a CustomInfoType, or one of the names listed at https://cloud.google.com/dlp/docs/infotypes-reference - when specifying a built-in type. InfoType names should - conform to the pattern ``[a-zA-Z0-9_]{1,64}``. + when specifying a built-in type. 
When sending Cloud DLP + results to Data Catalog, infoType names should conform to + the pattern ``[A-Za-z0-9$-_]{1,64}``. """ name = proto.Field(proto.STRING, number=1) @@ -355,7 +356,7 @@ class HotwordRule(proto.Message): message="CustomInfoType.DetectionRule.HotwordRule", ) - info_type = proto.Field(proto.MESSAGE, number=1, message=InfoType,) + info_type = proto.Field(proto.MESSAGE, number=1, message="InfoType",) likelihood = proto.Field(proto.ENUM, number=6, enum="Likelihood",) @@ -368,7 +369,7 @@ class HotwordRule(proto.Message): ) stored_type = proto.Field( - proto.MESSAGE, number=5, oneof="type", message=StoredType, + proto.MESSAGE, number=5, oneof="type", message="StoredType", ) detection_rules = proto.RepeatedField( @@ -435,9 +436,9 @@ class DatastoreOptions(proto.Message): The kind to process. """ - partition_id = proto.Field(proto.MESSAGE, number=1, message=PartitionId,) + partition_id = proto.Field(proto.MESSAGE, number=1, message="PartitionId",) - kind = proto.Field(proto.MESSAGE, number=2, message=KindExpression,) + kind = proto.Field(proto.MESSAGE, number=2, message="KindExpression",) class CloudStorageRegexFileSet(proto.Message): @@ -580,7 +581,7 @@ class FileSet(proto.Message): url = proto.Field(proto.STRING, number=1) regex_file_set = proto.Field( - proto.MESSAGE, number=2, message=CloudStorageRegexFileSet, + proto.MESSAGE, number=2, message="CloudStorageRegexFileSet", ) file_set = proto.Field(proto.MESSAGE, number=1, message=FileSet,) @@ -657,7 +658,7 @@ class BigQueryOptions(proto.Message): class SampleMethod(proto.Enum): r"""How to sample rows if not all rows are scanned. Meaningful only when used in conjunction with either rows_limit or rows_limit_percent. If - not specified, scanning would start from the top. + not specified, rows are scanned in the order BigQuery reads them. """ SAMPLE_METHOD_UNSPECIFIED = 0 TOP = 1 @@ -665,7 +666,9 @@ class SampleMethod(proto.Enum): table_reference = proto.Field(proto.MESSAGE, number=1, message="BigQueryTable",) - identifying_fields = proto.RepeatedField(proto.MESSAGE, number=2, message=FieldId,) + identifying_fields = proto.RepeatedField( + proto.MESSAGE, number=2, message="FieldId", + ) rows_limit = proto.Field(proto.INT64, number=3) @@ -673,7 +676,7 @@ class SampleMethod(proto.Enum): sample_method = proto.Field(proto.ENUM, number=4, enum=SampleMethod,) - excluded_fields = proto.RepeatedField(proto.MESSAGE, number=5, message=FieldId,) + excluded_fields = proto.RepeatedField(proto.MESSAGE, number=5, message="FieldId",) class StorageConfig(proto.Message): @@ -704,26 +707,33 @@ class TimespanConfig(proto.Message): Attributes: start_time (~.timestamp.Timestamp): - Exclude files or rows older than this value. + Exclude files, tables, or rows older than + this value. If not set, no lower time limit is + applied. end_time (~.timestamp.Timestamp): - Exclude files or rows newer than this value. - If set to zero, no upper time limit is applied. + Exclude files, tables, or rows newer than + this value. If not set, no upper time limit is + applied. timestamp_field (~.storage.FieldId): Specification of the field containing the timestamp of scanned items. Used for data sources like Datastore and BigQuery. - For BigQuery: Required to filter out rows based on the given - start and end times. If not specified and the table was - modified between the given start and end times, the entire - table will be scanned. The valid data types of the timestamp - field are: ``INTEGER``, ``DATE``, ``TIMESTAMP``, or - ``DATETIME`` BigQuery column. 
- - For Datastore. Valid data types of the timestamp field are: - ``TIMESTAMP``. Datastore entity will be scanned if the - timestamp property does not exist or its value is empty or - invalid. + For BigQuery: If this value is not specified and the table + was modified between the given start and end times, the + entire table will be scanned. If this value is specified, + then rows are filtered based on the given start and end + times. Rows with a ``NULL`` value in the provided BigQuery + column are skipped. Valid data types of the provided + BigQuery column are: ``INTEGER``, ``DATE``, ``TIMESTAMP``, + and ``DATETIME``. + + For Datastore: If this value is specified, then entities are + filtered based on the given start and end times. If an + entity does not contain the provided timestamp property or + contains empty or invalid values, then it is included. Valid + data types of the provided timestamp property are: + ``TIMESTAMP``. enable_auto_population_of_timespan_config (bool): When the job is started by a JobTrigger we will automatically figure out a valid start_time to avoid @@ -736,20 +746,20 @@ class TimespanConfig(proto.Message): end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) - timestamp_field = proto.Field(proto.MESSAGE, number=3, message=FieldId,) + timestamp_field = proto.Field(proto.MESSAGE, number=3, message="FieldId",) enable_auto_population_of_timespan_config = proto.Field(proto.BOOL, number=4) datastore_options = proto.Field( - proto.MESSAGE, number=2, oneof="type", message=DatastoreOptions, + proto.MESSAGE, number=2, oneof="type", message="DatastoreOptions", ) cloud_storage_options = proto.Field( - proto.MESSAGE, number=3, oneof="type", message=CloudStorageOptions, + proto.MESSAGE, number=3, oneof="type", message="CloudStorageOptions", ) big_query_options = proto.Field( - proto.MESSAGE, number=4, oneof="type", message=BigQueryOptions, + proto.MESSAGE, number=4, oneof="type", message="BigQueryOptions", ) hybrid_options = proto.Field( @@ -897,7 +907,7 @@ class PathElement(proto.Message): name = proto.Field(proto.STRING, number=3, oneof="id_type") - partition_id = proto.Field(proto.MESSAGE, number=1, message=PartitionId,) + partition_id = proto.Field(proto.MESSAGE, number=1, message="PartitionId",) path = proto.RepeatedField(proto.MESSAGE, number=2, message=PathElement,) @@ -918,11 +928,11 @@ class RecordKey(proto.Message): """ datastore_key = proto.Field( - proto.MESSAGE, number=2, oneof="type", message=DatastoreKey, + proto.MESSAGE, number=2, oneof="type", message="DatastoreKey", ) big_query_key = proto.Field( - proto.MESSAGE, number=3, oneof="type", message=BigQueryKey, + proto.MESSAGE, number=3, oneof="type", message="BigQueryKey", ) id_values = proto.RepeatedField(proto.STRING, number=5) @@ -963,9 +973,9 @@ class BigQueryField(proto.Message): Designated field in the BigQuery table. """ - table = proto.Field(proto.MESSAGE, number=1, message=BigQueryTable,) + table = proto.Field(proto.MESSAGE, number=1, message="BigQueryTable",) - field = proto.Field(proto.MESSAGE, number=2, message=FieldId,) + field = proto.Field(proto.MESSAGE, number=2, message="FieldId",) class EntityId(proto.Message): @@ -982,7 +992,7 @@ class EntityId(proto.Message): the entity identifier. """ - field = proto.Field(proto.MESSAGE, number=1, message=FieldId,) + field = proto.Field(proto.MESSAGE, number=1, message="FieldId",) class TableOptions(proto.Message): @@ -998,7 +1008,9 @@ class TableOptions(proto.Message): more than 3 may be provided. 
""" - identifying_fields = proto.RepeatedField(proto.MESSAGE, number=1, message=FieldId,) + identifying_fields = proto.RepeatedField( + proto.MESSAGE, number=1, message="FieldId", + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/noxfile.py b/noxfile.py index 2ad172f6..b776ccc5 100644 --- a/noxfile.py +++ b/noxfile.py @@ -28,7 +28,7 @@ DEFAULT_PYTHON_VERSION = "3.8" SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"] @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -72,7 +72,9 @@ def default(session): # Install all test dependencies, then install this package in-place. session.install("asyncmock", "pytest-asyncio") - session.install("mock", "pytest", "pytest-cov") + session.install( + "mock", "pytest", "pytest-cov", + ) session.install("-e", ".") # Run py.test against the unit tests. @@ -174,7 +176,9 @@ def docfx(session): """Build the docfx yaml files for this library.""" session.install("-e", ".") - session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml") + # sphinx-docfx-yaml supports up to sphinx version 1.5.5. + # https://github.com/docascode/sphinx-docfx-yaml/issues/97 + session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml") shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( diff --git a/samples/snippets/README.rst b/samples/snippets/README.rst index 1546fc25..0b25cc7a 100644 --- a/samples/snippets/README.rst +++ b/samples/snippets/README.rst @@ -5,7 +5,7 @@ Google Data Loss Prevention Python Samples =============================================================================== .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-dlp&page=editor&open_in_editor=samples/snippets/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=dlp/README.rst This directory contains samples for Google Data Loss Prevention. `Google Data Loss Prevention`_ provides programmatic access to a powerful detection engine for personally identifiable information and other privacy-sensitive data in unstructured data streams. @@ -37,12 +37,11 @@ credentials for applications. Install Dependencies ++++++++++++++++++++ -#. Clone python-dlp and change directory to the sample directory you want to use. +#. Clone python-docs-samples and change directory to the sample directory you want to use. .. code-block:: bash - $ git clone https://github.com/googleapis/python-dlp.git - $ cd samples/snippets + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git #. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. @@ -78,7 +77,7 @@ Quickstart +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-dlp&page=editor&open_in_editor=samples/snippets/quickstart.py,samples/snippets/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=dlp/quickstart.py,dlp/README.rst @@ -96,7 +95,7 @@ Inspect Content +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-dlp&page=editor&open_in_editor=samples/snippets/inspect_content.py,samples/snippets/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=dlp/inspect_content.py,dlp/README.rst @@ -134,7 +133,7 @@ Redact Content +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-dlp&page=editor&open_in_editor=samples/snippets/redact.py,samples/snippets/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=dlp/redact.py,dlp/README.rst @@ -170,7 +169,7 @@ Metadata +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-dlp&page=editor&open_in_editor=samples/snippets/metadata.py,samples/snippets/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=dlp/metadata.py,dlp/README.rst @@ -203,7 +202,7 @@ Jobs +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-dlp&page=editor&open_in_editor=samples/snippets/jobs.py,samples/snippets/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=dlp/jobs.py,dlp/README.rst @@ -236,7 +235,7 @@ Templates +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-dlp&page=editor&open_in_editor=samples/snippets/templates.py,samples/snippets/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=dlp/templates.py,dlp/README.rst @@ -269,7 +268,7 @@ Triggers +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. 
image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-dlp&page=editor&open_in_editor=samples/snippets/triggers.py,samples/snippets/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=dlp/triggers.py,dlp/README.rst @@ -302,7 +301,7 @@ Risk Analysis +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-dlp&page=editor&open_in_editor=samples/snippets/risk.py,samples/snippets/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=dlp/risk.py,dlp/README.rst @@ -341,7 +340,7 @@ DeID +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-dlp&page=editor&open_in_editor=samples/snippets/deid.py,samples/snippets/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor=dlp/deid.py,dlp/README.rst diff --git a/samples/snippets/inspect_content.py b/samples/snippets/inspect_content.py index bceb2981..4b048727 100644 --- a/samples/snippets/inspect_content.py +++ b/samples/snippets/inspect_content.py @@ -401,7 +401,7 @@ def inspect_file( # Construct the item, containing the file's byte data. with open(filename, mode="rb") as f: - item = {"byte_item": {"type": content_type_index, "data": f.read()}} + item = {"byte_item": {"type_": content_type_index, "data": f.read()}} # Convert the project id into a full resource id. parent = f"projects/{project}" diff --git a/samples/snippets/jobs.py b/samples/snippets/jobs.py index 971aa7f6..0bf77104 100644 --- a/samples/snippets/jobs.py +++ b/samples/snippets/jobs.py @@ -75,7 +75,7 @@ def list_dlp_jobs(project, filter_string=None, job_type=None): # Call the API to get a list of jobs. response = dlp.list_dlp_jobs( - request={"parent": parent, "filter": filter_string, "type": job_type} + request={"parent": parent, "filter": filter_string, "type_": job_type} ) # Iterate over results. diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 5660f08b..b90eef00 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -37,22 +37,28 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - "ignored_versions": ["2.7"], + 'ignored_versions': ["2.7"], + + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + 'enforce_type_hints': False, + # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. 
- "envs": {}, + 'envs': {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append(".") + sys.path.append('.') from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -67,12 +73,12 @@ def get_pytest_env_vars(): ret = {} # Override the GCLOUD_PROJECT and the alias. - env_key = TEST_CONFIG["gcloud_project_env"] + env_key = TEST_CONFIG['gcloud_project_env'] # This should error out if not set. - ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG["envs"]) + ret.update(TEST_CONFIG['envs']) return ret @@ -81,7 +87,7 @@ def get_pytest_env_vars(): ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] +IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -130,16 +136,29 @@ def _determine_local_import_names(start_dir): @nox.session def lint(session): - session.install("flake8", "flake8-import-order") + if not TEST_CONFIG['enforce_type_hints']: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") local_names = _determine_local_import_names(".") args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - ".", + "." ] session.run("flake8", *args) +# +# Black +# + +@nox.session +def blacken(session): + session.install("black") + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) # # Sample Tests @@ -180,9 +199,9 @@ def py(session): if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip( - "SKIPPED: {} tests are disabled for this sample.".format(session.python) - ) + session.skip("SKIPPED: {} tests are disabled for this sample.".format( + session.python + )) # @@ -199,6 +218,11 @@ def _get_repo_root(): break if Path(p / ".git").exists(): return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) p = p.parent raise Exception("Unable to detect repository root.") diff --git a/samples/snippets/redact.py b/samples/snippets/redact.py index b8307530..d99eee52 100644 --- a/samples/snippets/redact.py +++ b/samples/snippets/redact.py @@ -91,7 +91,7 @@ def redact_image( # Construct the byte_item, containing the file's byte data. with open(filename, mode="rb") as f: - byte_item = {"type": content_type_index, "data": f.read()} + byte_item = {"type_": content_type_index, "data": f.read()} # Convert the project id into a full resource id. parent = f"projects/{project}" @@ -146,7 +146,7 @@ def redact_image_all_text( # Construct the byte_item, containing the file's byte data. with open(filename, mode="rb") as f: - byte_item = {"type": google.cloud.dlp_v2.FileType.IMAGE, "data": f.read()} + byte_item = {"type_": google.cloud.dlp_v2.FileType.IMAGE, "data": f.read()} # Convert the project id into a full resource id. 
parent = f"projects/{project}" diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh index ff599eb2..21f6d2a2 100755 --- a/scripts/decrypt-secrets.sh +++ b/scripts/decrypt-secrets.sh @@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" ) # Work from the project root. cd $ROOT +# Prevent it from overriding files. +# We recommend that sample authors use their own service account files and cloud project. +# In that case, they are supposed to prepare these files by themselves. +if [[ -f "testing/test-env.sh" ]] || \ + [[ -f "testing/service-account.json" ]] || \ + [[ -f "testing/client-secrets.json" ]]; then + echo "One or more target files exist, aborting." + exit 1 +fi + # Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + --project="${PROJECT_ID}" \ > testing/test-env.sh gcloud secrets versions access latest \ --secret="python-docs-samples-service-account" \ + --project="${PROJECT_ID}" \ > testing/service-account.json gcloud secrets versions access latest \ --secret="python-docs-samples-client-secrets" \ - > testing/client-secrets.json \ No newline at end of file + --project="${PROJECT_ID}" \ + > testing/client-secrets.json diff --git a/scripts/fixup_dlp_v2_keywords.py b/scripts/fixup_dlp_v2_keywords.py index 5acfcdbe..f1419971 100644 --- a/scripts/fixup_dlp_v2_keywords.py +++ b/scripts/fixup_dlp_v2_keywords.py @@ -1,3 +1,4 @@ +#! /usr/bin/env python3 # -*- coding: utf-8 -*- # Copyright 2020 Google LLC @@ -63,7 +64,7 @@ class dlpCallTransformer(cst.CSTTransformer): 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type', 'order_by', 'location_id', ), + 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ), 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'location_id', ), diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst index 8bc8488c..4fd23976 100644 --- a/scripts/readme-gen/templates/README.tmpl.rst +++ b/scripts/readme-gen/templates/README.tmpl.rst @@ -6,7 +6,7 @@ =============================================================================== .. image:: https://gstatic.com/cloudssh/images/open-btn.png - :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-dlp&page=editor&open_in_editor={{folder}}/README.rst + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst This directory contains samples for {{product.name}}. {{product.description}} @@ -46,7 +46,7 @@ Samples {% if not sample.hide_cloudshell_button %} .. 
image:: https://gstatic.com/cloudssh/images/open-btn.png
-   :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/googleapis/python-dlp&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst
+   :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst


 {% endif %}


@@ -84,4 +84,4 @@ to `browse the source`_ and `report issues`_.
 {% endif %}


-.. _Google Cloud SDK: https://cloud.google.com/sdk/
+.. _Google Cloud SDK: https://cloud.google.com/sdk/
\ No newline at end of file
diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst
index 7e8de6dd..a0406dba 100644
--- a/scripts/readme-gen/templates/install_deps.tmpl.rst
+++ b/scripts/readme-gen/templates/install_deps.tmpl.rst
@@ -1,12 +1,11 @@
 Install Dependencies
 ++++++++++++++++++++

-#. Clone python-dlp and change directory to the sample directory you want to use.
+#. Clone python-docs-samples and change directory to the sample directory you want to use.

     .. code-block:: bash

-        $ git clone https://github.com/googleapis/python-dlp.git
-        $ cd samples/snippets
+        $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git

 #. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions.

diff --git a/synth.metadata b/synth.metadata
index 089ccad8..a5606163 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -4,21 +4,29 @@
     "git": {
       "name": ".",
       "remote": "https://github.com/googleapis/python-dlp.git",
-      "sha": "419772863bfa747cf0ef6278a931f95da70c954f"
+      "sha": "908a8d3e2ee741ebdbc78359a18c7e8a1255c5ea"
+    }
+  },
+  {
+    "git": {
+      "name": "googleapis",
+      "remote": "https://github.com/googleapis/googleapis.git",
+      "sha": "3f87da2ed1ddc3566ef0810c4fc06a2682cc9f5f",
+      "internalRef": "343022252"
     }
   },
   {
     "git": {
       "name": "synthtool",
       "remote": "https://github.com/googleapis/synthtool.git",
-      "sha": "d3049e66447b44dc10579e461d5e08e0e3838edd"
+      "sha": "e89175cf074dccc4babb4eca66ae913696e47a71"
     }
   },
   {
     "git": {
       "name": "synthtool",
       "remote": "https://github.com/googleapis/synthtool.git",
-      "sha": "d3049e66447b44dc10579e461d5e08e0e3838edd"
+      "sha": "e89175cf074dccc4babb4eca66ae913696e47a71"
     }
   }
 ],
@@ -32,5 +40,98 @@
       "generator": "bazel"
     }
   }
+  ],
+  "generatedFiles": [
+    ".flake8",
+    ".github/CONTRIBUTING.md",
+    ".github/ISSUE_TEMPLATE/bug_report.md",
+    ".github/ISSUE_TEMPLATE/feature_request.md",
+    ".github/ISSUE_TEMPLATE/support_request.md",
+    ".github/PULL_REQUEST_TEMPLATE.md",
+    ".github/release-please.yml",
+    ".github/snippet-bot.yml",
+    ".gitignore",
+    ".kokoro/build.sh",
+    ".kokoro/continuous/common.cfg",
+    ".kokoro/continuous/continuous.cfg",
+    ".kokoro/docker/docs/Dockerfile",
+    ".kokoro/docker/docs/fetch_gpg_keys.sh",
+    ".kokoro/docs/common.cfg",
+    ".kokoro/docs/docs-presubmit.cfg",
+    ".kokoro/docs/docs.cfg",
+    ".kokoro/populate-secrets.sh",
+    ".kokoro/presubmit/common.cfg",
+    ".kokoro/presubmit/presubmit.cfg",
+    ".kokoro/publish-docs.sh",
+    ".kokoro/release.sh",
+    ".kokoro/release/common.cfg",
+    ".kokoro/release/release.cfg",
+    ".kokoro/samples/lint/common.cfg",
+    ".kokoro/samples/lint/continuous.cfg",
+    ".kokoro/samples/lint/periodic.cfg",
+    ".kokoro/samples/lint/presubmit.cfg",
+    ".kokoro/samples/python3.6/common.cfg",
+    ".kokoro/samples/python3.6/continuous.cfg",
+    ".kokoro/samples/python3.6/periodic.cfg",
+    ".kokoro/samples/python3.6/presubmit.cfg",
+    ".kokoro/samples/python3.7/common.cfg",
+    ".kokoro/samples/python3.7/continuous.cfg",
+    ".kokoro/samples/python3.7/periodic.cfg",
+    ".kokoro/samples/python3.7/presubmit.cfg",
+    ".kokoro/samples/python3.8/common.cfg",
+    ".kokoro/samples/python3.8/continuous.cfg",
+    ".kokoro/samples/python3.8/periodic.cfg",
+    ".kokoro/samples/python3.8/presubmit.cfg",
+    ".kokoro/test-samples.sh",
+    ".kokoro/trampoline.sh",
+    ".kokoro/trampoline_v2.sh",
+    ".trampolinerc",
+    "CODE_OF_CONDUCT.md",
+    "CONTRIBUTING.rst",
+    "LICENSE",
+    "MANIFEST.in",
+    "docs/_static/custom.css",
+    "docs/_templates/layout.html",
+    "docs/conf.py",
+    "docs/dlp_v2/services.rst",
+    "docs/dlp_v2/types.rst",
+    "docs/multiprocessing.rst",
+    "google/cloud/dlp/__init__.py",
+    "google/cloud/dlp/py.typed",
+    "google/cloud/dlp_v2/__init__.py",
+    "google/cloud/dlp_v2/proto/dlp.proto",
+    "google/cloud/dlp_v2/proto/storage.proto",
+    "google/cloud/dlp_v2/py.typed",
+    "google/cloud/dlp_v2/services/__init__.py",
+    "google/cloud/dlp_v2/services/dlp_service/__init__.py",
+    "google/cloud/dlp_v2/services/dlp_service/async_client.py",
+    "google/cloud/dlp_v2/services/dlp_service/client.py",
+    "google/cloud/dlp_v2/services/dlp_service/pagers.py",
+    "google/cloud/dlp_v2/services/dlp_service/transports/__init__.py",
+    "google/cloud/dlp_v2/services/dlp_service/transports/base.py",
+    "google/cloud/dlp_v2/services/dlp_service/transports/grpc.py",
+    "google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py",
+    "google/cloud/dlp_v2/types/__init__.py",
+    "google/cloud/dlp_v2/types/dlp.py",
+    "google/cloud/dlp_v2/types/storage.py",
+    "mypy.ini",
+    "noxfile.py",
+    "renovate.json",
+    "samples/AUTHORING_GUIDE.md",
+    "samples/CONTRIBUTING.md",
+    "samples/snippets/README.rst",
+    "samples/snippets/noxfile.py",
+    "scripts/decrypt-secrets.sh",
+    "scripts/fixup_dlp_v2_keywords.py",
+    "scripts/readme-gen/readme_gen.py",
+    "scripts/readme-gen/templates/README.tmpl.rst",
+    "scripts/readme-gen/templates/auth.tmpl.rst",
+    "scripts/readme-gen/templates/auth_api_key.tmpl.rst",
+    "scripts/readme-gen/templates/install_deps.tmpl.rst",
+    "scripts/readme-gen/templates/install_portaudio.tmpl.rst",
+    "setup.cfg",
+    "testing/.gitignore",
+    "tests/unit/gapic/dlp_v2/__init__.py",
+    "tests/unit/gapic/dlp_v2/test_dlp_service.py"
   ]
 }
\ No newline at end of file
diff --git a/tests/unit/gapic/dlp_v2/test_dlp_service.py b/tests/unit/gapic/dlp_v2/test_dlp_service.py
index 040a033a..7c5dd868 100644
--- a/tests/unit/gapic/dlp_v2/test_dlp_service.py
+++ b/tests/unit/gapic/dlp_v2/test_dlp_service.py
@@ -39,7 +39,7 @@ from google.cloud.dlp_v2.types import dlp
 from google.cloud.dlp_v2.types import storage
 from google.oauth2 import service_account

-from google.protobuf import any_pb2 as any  # type: ignore
+from google.protobuf import any_pb2 as gp_any  # type: ignore
 from google.protobuf import duration_pb2 as duration  # type: ignore
 from google.protobuf import field_mask_pb2 as field_mask  # type: ignore
 from google.protobuf import timestamp_pb2 as timestamp  # type: ignore
@@ -98,12 +98,12 @@ def test_dlp_service_client_from_service_account_file(client_class):
     ) as factory:
         factory.return_value = creds
         client = client_class.from_service_account_file("dummy/file/path.json")
-        assert client._transport._credentials == creds
+        assert client.transport._credentials == creds

         client = client_class.from_service_account_json("dummy/file/path.json")
-        assert client._transport._credentials == creds
+        assert client.transport._credentials == creds

-        assert client._transport._host == "dlp.googleapis.com:443"
+        assert client.transport._host == "dlp.googleapis.com:443"


 def test_dlp_service_client_get_transport_class():
@@ -157,14 +157,14 @@ def test_dlp_service_client_client_options(
             credentials_file=None,
             host="squid.clam.whelk",
             scopes=None,
-            api_mtls_endpoint="squid.clam.whelk",
-            client_cert_source=None,
+            ssl_channel_credentials=None,
             quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
         )

-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
     # "never".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
         with mock.patch.object(transport_class, "__init__") as patched:
             patched.return_value = None
             client = client_class()
@@ -173,14 +173,14 @@ def test_dlp_service_client_client_options(
             credentials_file=None,
             host=client.DEFAULT_ENDPOINT,
             scopes=None,
-            api_mtls_endpoint=client.DEFAULT_ENDPOINT,
-            client_cert_source=None,
+            ssl_channel_credentials=None,
             quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
         )

-    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
     # "always".
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}):
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
         with mock.patch.object(transport_class, "__init__") as patched:
             patched.return_value = None
             client = client_class()
@@ -189,90 +189,173 @@ def test_dlp_service_client_client_options(
             credentials_file=None,
             host=client.DEFAULT_MTLS_ENDPOINT,
             scopes=None,
-            api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
-            client_cert_source=None,
+            ssl_channel_credentials=None,
             quota_project_id=None,
+            client_info=transports.base.DEFAULT_CLIENT_INFO,
         )

-    # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
-    # "auto", and client_cert_source is provided.
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+    # unsupported value.
+    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+        with pytest.raises(MutualTLSChannelError):
+            client = client_class()
+
+    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + ssl_channel_credentials=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "true"), + ( + DlpServiceAsyncClient, + transports.DlpServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "false"), + ( + DlpServiceAsyncClient, + transports.DlpServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ], +) +@mock.patch.object( + DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient) +) +@mock.patch.object( + DlpServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DlpServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_dlp_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): options = client_options.ClientOptions( client_cert_source=client_cert_source_callback ) with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=client_cert_source_callback, - quota_project_id=None, - ) - - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", and default_client_cert_source is provided. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): - with mock.patch.object(transport_class, "__init__") as patched: + ssl_channel_creds = mock.Mock() with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, + "grpc.ssl_channel_credentials", return_value=ssl_channel_creds ): patched.return_value = None - client = client_class() + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_ssl_channel_creds = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_ssl_channel_creds = ssl_channel_creds + expected_host = client.DEFAULT_MTLS_ENDPOINT + patched.assert_called_once_with( credentials=None, credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, + host=expected_host, scopes=None, - api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, - client_cert_source=None, + ssl_channel_credentials=expected_ssl_channel_creds, quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, ) - # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is - # "auto", but client_cert_source and default_client_cert_source are None. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): with mock.patch.object(transport_class, "__init__") as patched: with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, + "google.auth.transport.grpc.SslCredentials.__init__", return_value=None ): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - api_mtls_endpoint=client.DEFAULT_ENDPOINT, - client_cert_source=None, - quota_project_id=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has - # unsupported value. 
-    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}):
-        with pytest.raises(MutualTLSChannelError):
-            client = client_class()
-
-    # Check the case quota_project_id is provided
-    options = client_options.ClientOptions(quota_project_id="octopus")
-    with mock.patch.object(transport_class, "__init__") as patched:
-        patched.return_value = None
-        client = client_class(client_options=options)
-        patched.assert_called_once_with(
-            credentials=None,
-            credentials_file=None,
-            host=client.DEFAULT_ENDPOINT,
-            scopes=None,
-            api_mtls_endpoint=client.DEFAULT_ENDPOINT,
-            client_cert_source=None,
-            quota_project_id="octopus",
-        )
+            with mock.patch(
+                "google.auth.transport.grpc.SslCredentials.is_mtls",
+                new_callable=mock.PropertyMock,
+            ) as is_mtls_mock:
+                with mock.patch(
+                    "google.auth.transport.grpc.SslCredentials.ssl_credentials",
+                    new_callable=mock.PropertyMock,
+                ) as ssl_credentials_mock:
+                    if use_client_cert_env == "false":
+                        is_mtls_mock.return_value = False
+                        ssl_credentials_mock.return_value = None
+                        expected_host = client.DEFAULT_ENDPOINT
+                        expected_ssl_channel_creds = None
+                    else:
+                        is_mtls_mock.return_value = True
+                        ssl_credentials_mock.return_value = mock.Mock()
+                        expected_host = client.DEFAULT_MTLS_ENDPOINT
+                        expected_ssl_channel_creds = (
+                            ssl_credentials_mock.return_value
+                        )
+
+                    patched.return_value = None
+                    client = client_class()
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=expected_host,
+                        scopes=None,
+                        ssl_channel_credentials=expected_ssl_channel_creds,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                    )
+
+    # Check the case client_cert_source and ADC client cert are not provided.
+    with mock.patch.dict(
+        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+    ):
+        with mock.patch.object(transport_class, "__init__") as patched:
+            with mock.patch(
+                "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+            ):
+                with mock.patch(
+                    "google.auth.transport.grpc.SslCredentials.is_mtls",
+                    new_callable=mock.PropertyMock,
+                ) as is_mtls_mock:
+                    is_mtls_mock.return_value = False
+                    patched.return_value = None
+                    client = client_class()
+                    patched.assert_called_once_with(
+                        credentials=None,
+                        credentials_file=None,
+                        host=client.DEFAULT_ENDPOINT,
+                        scopes=None,
+                        ssl_channel_credentials=None,
+                        quota_project_id=None,
+                        client_info=transports.base.DEFAULT_CLIENT_INFO,
+                    )


 @pytest.mark.parametrize(
@@ -299,9 +382,9 @@ def test_dlp_service_client_client_options_scopes(
         credentials_file=None,
         host=client.DEFAULT_ENDPOINT,
         scopes=["1", "2"],
-        api_mtls_endpoint=client.DEFAULT_ENDPOINT,
-        client_cert_source=None,
+        ssl_channel_credentials=None,
         quota_project_id=None,
+        client_info=transports.base.DEFAULT_CLIENT_INFO,
     )


@@ -329,9 +412,9 @@ def test_dlp_service_client_client_options_credentials_file(
         credentials_file="credentials.json",
         host=client.DEFAULT_ENDPOINT,
         scopes=None,
-        api_mtls_endpoint=client.DEFAULT_ENDPOINT,
-        client_cert_source=None,
+        ssl_channel_credentials=None,
         quota_project_id=None,
+        client_info=transports.base.DEFAULT_CLIENT_INFO,
     )


@@ -346,9 +429,9 @@ def test_dlp_service_client_client_options_from_dict():
         credentials_file=None,
         host="squid.clam.whelk",
         scopes=None,
-        api_mtls_endpoint="squid.clam.whelk",
-        client_cert_source=None,
+        ssl_channel_credentials=None,
         quota_project_id=None,
+        client_info=transports.base.DEFAULT_CLIENT_INFO,
     )


@@ -364,7 +447,7 @@ def test_inspect_content(
     request = request_type()

     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.inspect_content), "__call__") as call:
+    with mock.patch.object(type(client.transport.inspect_content), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = dlp.InspectContentResponse()

@@ -377,6 +460,7 @@ def test_inspect_content(
     assert args[0] == dlp.InspectContentRequest()

     # Establish that the response is the type that we expect.
+
     assert isinstance(response, dlp.InspectContentResponse)


@@ -385,19 +469,19 @@ def test_inspect_content_from_dict():


 @pytest.mark.asyncio
-async def test_inspect_content_async(transport: str = "grpc_asyncio"):
+async def test_inspect_content_async(
+    transport: str = "grpc_asyncio", request_type=dlp.InspectContentRequest
+):
     client = DlpServiceAsyncClient(
         credentials=credentials.AnonymousCredentials(), transport=transport,
     )

     # Everything is optional in proto3 as far as the runtime is concerned,
     # and we are mocking out the actual API, so just send an empty request.
-    request = dlp.InspectContentRequest()
+    request = request_type()

     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.inspect_content), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.inspect_content), "__call__") as call:
         # Designate an appropriate return value for the call.
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             dlp.InspectContentResponse()
@@ -409,12 +493,17 @@ async def test_inspect_content_async(transport: str = "grpc_asyncio"):
         assert len(call.mock_calls)
         _, args, _ = call.mock_calls[0]

-        assert args[0] == request
+        assert args[0] == dlp.InspectContentRequest()

     # Establish that the response is the type that we expect.
     assert isinstance(response, dlp.InspectContentResponse)


+@pytest.mark.asyncio
+async def test_inspect_content_async_from_dict():
+    await test_inspect_content_async(request_type=dict)
+
+
 def test_inspect_content_field_headers():
     client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),)

@@ -424,7 +513,7 @@ def test_inspect_content_field_headers():
     request.parent = "parent/value"

     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.inspect_content), "__call__") as call:
+    with mock.patch.object(type(client.transport.inspect_content), "__call__") as call:
         call.return_value = dlp.InspectContentResponse()

         client.inspect_content(request)
@@ -449,9 +538,7 @@ async def test_inspect_content_field_headers_async():
     request.parent = "parent/value"

     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-        type(client._client._transport.inspect_content), "__call__"
-    ) as call:
+    with mock.patch.object(type(client.transport.inspect_content), "__call__") as call:
         call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
             dlp.InspectContentResponse()
         )
@@ -478,7 +565,7 @@ def test_redact_image(transport: str = "grpc", request_type=dlp.RedactImageReque
     request = request_type()

     # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(type(client._transport.redact_image), "__call__") as call:
+    with mock.patch.object(type(client.transport.redact_image), "__call__") as call:
         # Designate an appropriate return value for the call.
call.return_value = dlp.RedactImageResponse( redacted_image=b"redacted_image_blob", @@ -494,6 +581,7 @@ def test_redact_image(transport: str = "grpc", request_type=dlp.RedactImageReque assert args[0] == dlp.RedactImageRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) assert response.redacted_image == b"redacted_image_blob" @@ -506,19 +594,19 @@ def test_redact_image_from_dict(): @pytest.mark.asyncio -async def test_redact_image_async(transport: str = "grpc_asyncio"): +async def test_redact_image_async( + transport: str = "grpc_asyncio", request_type=dlp.RedactImageRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.RedactImageRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.redact_image), "__call__" - ) as call: + with mock.patch.object(type(client.transport.redact_image), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.RedactImageResponse( @@ -533,7 +621,7 @@ async def test_redact_image_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.RedactImageRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.RedactImageResponse) @@ -543,6 +631,11 @@ async def test_redact_image_async(transport: str = "grpc_asyncio"): assert response.extracted_text == "extracted_text_value" +@pytest.mark.asyncio +async def test_redact_image_async_from_dict(): + await test_redact_image_async(request_type=dict) + + def test_redact_image_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -552,7 +645,7 @@ def test_redact_image_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.redact_image), "__call__") as call: + with mock.patch.object(type(client.transport.redact_image), "__call__") as call: call.return_value = dlp.RedactImageResponse() client.redact_image(request) @@ -577,9 +670,7 @@ async def test_redact_image_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.redact_image), "__call__" - ) as call: + with mock.patch.object(type(client.transport.redact_image), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.RedactImageResponse() ) @@ -609,7 +700,7 @@ def test_deidentify_content( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.deidentify_content), "__call__" + type(client.transport.deidentify_content), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.DeidentifyContentResponse() @@ -623,6 +714,7 @@ def test_deidentify_content( assert args[0] == dlp.DeidentifyContentRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyContentResponse) @@ -631,18 +723,20 @@ def test_deidentify_content_from_dict(): @pytest.mark.asyncio -async def test_deidentify_content_async(transport: str = "grpc_asyncio"): +async def test_deidentify_content_async( + transport: str = "grpc_asyncio", request_type=dlp.DeidentifyContentRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.DeidentifyContentRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.deidentify_content), "__call__" + type(client.transport.deidentify_content), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -655,12 +749,17 @@ async def test_deidentify_content_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.DeidentifyContentRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.DeidentifyContentResponse) +@pytest.mark.asyncio +async def test_deidentify_content_async_from_dict(): + await test_deidentify_content_async(request_type=dict) + + def test_deidentify_content_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -671,7 +770,7 @@ def test_deidentify_content_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.deidentify_content), "__call__" + type(client.transport.deidentify_content), "__call__" ) as call: call.return_value = dlp.DeidentifyContentResponse() @@ -698,7 +797,7 @@ async def test_deidentify_content_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.deidentify_content), "__call__" + type(client.transport.deidentify_content), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.DeidentifyContentResponse() @@ -729,7 +828,7 @@ def test_reidentify_content( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.reidentify_content), "__call__" + type(client.transport.reidentify_content), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ReidentifyContentResponse() @@ -743,6 +842,7 @@ def test_reidentify_content( assert args[0] == dlp.ReidentifyContentRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) @@ -751,18 +851,20 @@ def test_reidentify_content_from_dict(): @pytest.mark.asyncio -async def test_reidentify_content_async(transport: str = "grpc_asyncio"): +async def test_reidentify_content_async( + transport: str = "grpc_asyncio", request_type=dlp.ReidentifyContentRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = dlp.ReidentifyContentRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.reidentify_content), "__call__" + type(client.transport.reidentify_content), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -775,12 +877,17 @@ async def test_reidentify_content_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.ReidentifyContentRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.ReidentifyContentResponse) +@pytest.mark.asyncio +async def test_reidentify_content_async_from_dict(): + await test_reidentify_content_async(request_type=dict) + + def test_reidentify_content_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -791,7 +898,7 @@ def test_reidentify_content_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.reidentify_content), "__call__" + type(client.transport.reidentify_content), "__call__" ) as call: call.return_value = dlp.ReidentifyContentResponse() @@ -818,7 +925,7 @@ async def test_reidentify_content_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.reidentify_content), "__call__" + type(client.transport.reidentify_content), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ReidentifyContentResponse() @@ -848,7 +955,7 @@ def test_list_info_types( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_info_types), "__call__") as call: + with mock.patch.object(type(client.transport.list_info_types), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListInfoTypesResponse() @@ -861,6 +968,7 @@ def test_list_info_types( assert args[0] == dlp.ListInfoTypesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) @@ -869,19 +977,19 @@ def test_list_info_types_from_dict(): @pytest.mark.asyncio -async def test_list_info_types_async(transport: str = "grpc_asyncio"): +async def test_list_info_types_async( + transport: str = "grpc_asyncio", request_type=dlp.ListInfoTypesRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.ListInfoTypesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_info_types), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_info_types), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListInfoTypesResponse() @@ -893,17 +1001,22 @@ async def test_list_info_types_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.ListInfoTypesRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.ListInfoTypesResponse) +@pytest.mark.asyncio +async def test_list_info_types_async_from_dict(): + await test_list_info_types_async(request_type=dict) + + def test_list_info_types_flattened(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_info_types), "__call__") as call: + with mock.patch.object(type(client.transport.list_info_types), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListInfoTypesResponse() @@ -935,9 +1048,7 @@ async def test_list_info_types_flattened_async(): client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_info_types), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_info_types), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListInfoTypesResponse() @@ -981,7 +1092,7 @@ def test_create_inspect_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_inspect_template), "__call__" + type(client.transport.create_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.InspectTemplate( @@ -999,6 +1110,7 @@ def test_create_inspect_template( assert args[0] == dlp.CreateInspectTemplateRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) assert response.name == "name_value" @@ -1013,18 +1125,20 @@ def test_create_inspect_template_from_dict(): @pytest.mark.asyncio -async def test_create_inspect_template_async(transport: str = "grpc_asyncio"): +async def test_create_inspect_template_async( + transport: str = "grpc_asyncio", request_type=dlp.CreateInspectTemplateRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.CreateInspectTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_inspect_template), "__call__" + type(client.transport.create_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1041,7 +1155,7 @@ async def test_create_inspect_template_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.CreateInspectTemplateRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, dlp.InspectTemplate) @@ -1053,6 +1167,11 @@ async def test_create_inspect_template_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_create_inspect_template_async_from_dict(): + await test_create_inspect_template_async(request_type=dict) + + def test_create_inspect_template_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -1063,7 +1182,7 @@ def test_create_inspect_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_inspect_template), "__call__" + type(client.transport.create_inspect_template), "__call__" ) as call: call.return_value = dlp.InspectTemplate() @@ -1090,7 +1209,7 @@ async def test_create_inspect_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_inspect_template), "__call__" + type(client.transport.create_inspect_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) @@ -1111,7 +1230,7 @@ def test_create_inspect_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_inspect_template), "__call__" + type(client.transport.create_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.InspectTemplate() @@ -1152,7 +1271,7 @@ async def test_create_inspect_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_inspect_template), "__call__" + type(client.transport.create_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.InspectTemplate() @@ -1202,7 +1321,7 @@ def test_update_inspect_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_inspect_template), "__call__" + type(client.transport.update_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.InspectTemplate( @@ -1220,6 +1339,7 @@ def test_update_inspect_template( assert args[0] == dlp.UpdateInspectTemplateRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) assert response.name == "name_value" @@ -1234,18 +1354,20 @@ def test_update_inspect_template_from_dict(): @pytest.mark.asyncio -async def test_update_inspect_template_async(transport: str = "grpc_asyncio"): +async def test_update_inspect_template_async( + transport: str = "grpc_asyncio", request_type=dlp.UpdateInspectTemplateRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.UpdateInspectTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_inspect_template), "__call__" + type(client.transport.update_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1262,7 +1384,7 @@ async def test_update_inspect_template_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.UpdateInspectTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.InspectTemplate) @@ -1274,6 +1396,11 @@ async def test_update_inspect_template_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_update_inspect_template_async_from_dict(): + await test_update_inspect_template_async(request_type=dict) + + def test_update_inspect_template_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -1284,7 +1411,7 @@ def test_update_inspect_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_inspect_template), "__call__" + type(client.transport.update_inspect_template), "__call__" ) as call: call.return_value = dlp.InspectTemplate() @@ -1311,7 +1438,7 @@ async def test_update_inspect_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_inspect_template), "__call__" + type(client.transport.update_inspect_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) @@ -1332,7 +1459,7 @@ def test_update_inspect_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_inspect_template), "__call__" + type(client.transport.update_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.InspectTemplate() @@ -1377,7 +1504,7 @@ async def test_update_inspect_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_inspect_template), "__call__" + type(client.transport.update_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.InspectTemplate() @@ -1431,7 +1558,7 @@ def test_get_inspect_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_inspect_template), "__call__" + type(client.transport.get_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.InspectTemplate( @@ -1449,6 +1576,7 @@ def test_get_inspect_template( assert args[0] == dlp.GetInspectTemplateRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) assert response.name == "name_value" @@ -1463,18 +1591,20 @@ def test_get_inspect_template_from_dict(): @pytest.mark.asyncio -async def test_get_inspect_template_async(transport: str = "grpc_asyncio"): +async def test_get_inspect_template_async( + transport: str = "grpc_asyncio", request_type=dlp.GetInspectTemplateRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = dlp.GetInspectTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_inspect_template), "__call__" + type(client.transport.get_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1491,7 +1621,7 @@ async def test_get_inspect_template_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.GetInspectTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.InspectTemplate) @@ -1503,6 +1633,11 @@ async def test_get_inspect_template_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_get_inspect_template_async_from_dict(): + await test_get_inspect_template_async(request_type=dict) + + def test_get_inspect_template_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -1513,7 +1648,7 @@ def test_get_inspect_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_inspect_template), "__call__" + type(client.transport.get_inspect_template), "__call__" ) as call: call.return_value = dlp.InspectTemplate() @@ -1540,7 +1675,7 @@ async def test_get_inspect_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_inspect_template), "__call__" + type(client.transport.get_inspect_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) @@ -1561,7 +1696,7 @@ def test_get_inspect_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_inspect_template), "__call__" + type(client.transport.get_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.InspectTemplate() @@ -1595,7 +1730,7 @@ async def test_get_inspect_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_inspect_template), "__call__" + type(client.transport.get_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.InspectTemplate() @@ -1638,7 +1773,7 @@ def test_list_inspect_templates( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_inspect_templates), "__call__" + type(client.transport.list_inspect_templates), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListInspectTemplatesResponse( @@ -1654,6 +1789,7 @@ def test_list_inspect_templates( assert args[0] == dlp.ListInspectTemplatesRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListInspectTemplatesPager) assert response.next_page_token == "next_page_token_value" @@ -1664,18 +1800,20 @@ def test_list_inspect_templates_from_dict(): @pytest.mark.asyncio -async def test_list_inspect_templates_async(transport: str = "grpc_asyncio"): +async def test_list_inspect_templates_async( + transport: str = "grpc_asyncio", request_type=dlp.ListInspectTemplatesRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.ListInspectTemplatesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_inspect_templates), "__call__" + type(client.transport.list_inspect_templates), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -1688,7 +1826,7 @@ async def test_list_inspect_templates_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.ListInspectTemplatesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListInspectTemplatesAsyncPager) @@ -1696,6 +1834,11 @@ async def test_list_inspect_templates_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_inspect_templates_async_from_dict(): + await test_list_inspect_templates_async(request_type=dict) + + def test_list_inspect_templates_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -1706,7 +1849,7 @@ def test_list_inspect_templates_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_inspect_templates), "__call__" + type(client.transport.list_inspect_templates), "__call__" ) as call: call.return_value = dlp.ListInspectTemplatesResponse() @@ -1733,7 +1876,7 @@ async def test_list_inspect_templates_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_inspect_templates), "__call__" + type(client.transport.list_inspect_templates), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListInspectTemplatesResponse() @@ -1756,7 +1899,7 @@ def test_list_inspect_templates_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_inspect_templates), "__call__" + type(client.transport.list_inspect_templates), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListInspectTemplatesResponse() @@ -1790,7 +1933,7 @@ async def test_list_inspect_templates_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_inspect_templates), "__call__" + type(client.transport.list_inspect_templates), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = dlp.ListInspectTemplatesResponse() @@ -1827,7 +1970,7 @@ def test_list_inspect_templates_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_inspect_templates), "__call__" + type(client.transport.list_inspect_templates), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1869,7 +2012,7 @@ def test_list_inspect_templates_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_inspect_templates), "__call__" + type(client.transport.list_inspect_templates), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -1893,8 +2036,8 @@ def test_list_inspect_templates_pages(): RuntimeError, ) pages = list(client.list_inspect_templates(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -1903,7 +2046,7 @@ async def test_list_inspect_templates_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_inspect_templates), + type(client.transport.list_inspect_templates), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1944,7 +2087,7 @@ async def test_list_inspect_templates_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_inspect_templates), + type(client.transport.list_inspect_templates), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -1970,10 +2113,10 @@ async def test_list_inspect_templates_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_inspect_templates(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_inspect_templates(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_delete_inspect_template( @@ -1989,7 +2132,7 @@ def test_delete_inspect_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_inspect_template), "__call__" + type(client.transport.delete_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2011,18 +2154,20 @@ def test_delete_inspect_template_from_dict(): @pytest.mark.asyncio -async def test_delete_inspect_template_async(transport: str = "grpc_asyncio"): +async def test_delete_inspect_template_async( + transport: str = "grpc_asyncio", request_type=dlp.DeleteInspectTemplateRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.DeleteInspectTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
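
# NOTE: in the pager tests only the loop variable is renamed, `page` ->
# `page_`, following the same trailing-underscore convention this change uses
# for colliding names; the pagination assertions are untouched:
#
#     pages = list(client.list_inspect_templates(request={}).pages)
#     for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
#         assert page_.raw_page.next_page_token == token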
with mock.patch.object( - type(client._client._transport.delete_inspect_template), "__call__" + type(client.transport.delete_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2033,12 +2178,17 @@ async def test_delete_inspect_template_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.DeleteInspectTemplateRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_inspect_template_async_from_dict(): + await test_delete_inspect_template_async(request_type=dict) + + def test_delete_inspect_template_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -2049,7 +2199,7 @@ def test_delete_inspect_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_inspect_template), "__call__" + type(client.transport.delete_inspect_template), "__call__" ) as call: call.return_value = None @@ -2076,7 +2226,7 @@ async def test_delete_inspect_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_inspect_template), "__call__" + type(client.transport.delete_inspect_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -2097,7 +2247,7 @@ def test_delete_inspect_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_inspect_template), "__call__" + type(client.transport.delete_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2131,7 +2281,7 @@ async def test_delete_inspect_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_inspect_template), "__call__" + type(client.transport.delete_inspect_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -2174,7 +2324,7 @@ def test_create_deidentify_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_deidentify_template), "__call__" + type(client.transport.create_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.DeidentifyTemplate( @@ -2192,6 +2342,7 @@ def test_create_deidentify_template( assert args[0] == dlp.CreateDeidentifyTemplateRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyTemplate) assert response.name == "name_value" @@ -2206,18 +2357,20 @@ def test_create_deidentify_template_from_dict(): @pytest.mark.asyncio -async def test_create_deidentify_template_async(transport: str = "grpc_asyncio"): +async def test_create_deidentify_template_async( + transport: str = "grpc_asyncio", request_type=dlp.CreateDeidentifyTemplateRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.CreateDeidentifyTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_deidentify_template), "__call__" + type(client.transport.create_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2234,7 +2387,7 @@ async def test_create_deidentify_template_async(transport: str = "grpc_asyncio") assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.CreateDeidentifyTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.DeidentifyTemplate) @@ -2246,6 +2399,11 @@ async def test_create_deidentify_template_async(transport: str = "grpc_asyncio") assert response.description == "description_value" +@pytest.mark.asyncio +async def test_create_deidentify_template_async_from_dict(): + await test_create_deidentify_template_async(request_type=dict) + + def test_create_deidentify_template_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -2256,7 +2414,7 @@ def test_create_deidentify_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_deidentify_template), "__call__" + type(client.transport.create_deidentify_template), "__call__" ) as call: call.return_value = dlp.DeidentifyTemplate() @@ -2283,7 +2441,7 @@ async def test_create_deidentify_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_deidentify_template), "__call__" + type(client.transport.create_deidentify_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.DeidentifyTemplate() @@ -2306,7 +2464,7 @@ def test_create_deidentify_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_deidentify_template), "__call__" + type(client.transport.create_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.DeidentifyTemplate() @@ -2347,7 +2505,7 @@ async def test_create_deidentify_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_deidentify_template), "__call__" + type(client.transport.create_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = dlp.DeidentifyTemplate() @@ -2399,7 +2557,7 @@ def test_update_deidentify_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_deidentify_template), "__call__" + type(client.transport.update_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.DeidentifyTemplate( @@ -2417,6 +2575,7 @@ def test_update_deidentify_template( assert args[0] == dlp.UpdateDeidentifyTemplateRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) assert response.name == "name_value" @@ -2431,18 +2590,20 @@ def test_update_deidentify_template_from_dict(): @pytest.mark.asyncio -async def test_update_deidentify_template_async(transport: str = "grpc_asyncio"): +async def test_update_deidentify_template_async( + transport: str = "grpc_asyncio", request_type=dlp.UpdateDeidentifyTemplateRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.UpdateDeidentifyTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_deidentify_template), "__call__" + type(client.transport.update_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2459,7 +2620,7 @@ async def test_update_deidentify_template_async(transport: str = "grpc_asyncio") assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.DeidentifyTemplate) @@ -2471,6 +2632,11 @@ async def test_update_deidentify_template_async(transport: str = "grpc_asyncio") assert response.description == "description_value" +@pytest.mark.asyncio +async def test_update_deidentify_template_async_from_dict(): + await test_update_deidentify_template_async(request_type=dict) + + def test_update_deidentify_template_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -2481,7 +2647,7 @@ def test_update_deidentify_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_deidentify_template), "__call__" + type(client.transport.update_deidentify_template), "__call__" ) as call: call.return_value = dlp.DeidentifyTemplate() @@ -2508,7 +2674,7 @@ async def test_update_deidentify_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_deidentify_template), "__call__" + type(client.transport.update_deidentify_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.DeidentifyTemplate() @@ -2531,7 +2697,7 @@ def test_update_deidentify_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.update_deidentify_template), "__call__" + type(client.transport.update_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.DeidentifyTemplate() @@ -2576,7 +2742,7 @@ async def test_update_deidentify_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_deidentify_template), "__call__" + type(client.transport.update_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.DeidentifyTemplate() @@ -2632,7 +2798,7 @@ def test_get_deidentify_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_deidentify_template), "__call__" + type(client.transport.get_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.DeidentifyTemplate( @@ -2650,6 +2816,7 @@ def test_get_deidentify_template( assert args[0] == dlp.GetDeidentifyTemplateRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) assert response.name == "name_value" @@ -2664,18 +2831,20 @@ def test_get_deidentify_template_from_dict(): @pytest.mark.asyncio -async def test_get_deidentify_template_async(transport: str = "grpc_asyncio"): +async def test_get_deidentify_template_async( + transport: str = "grpc_asyncio", request_type=dlp.GetDeidentifyTemplateRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.GetDeidentifyTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_deidentify_template), "__call__" + type(client.transport.get_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2692,7 +2861,7 @@ async def test_get_deidentify_template_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.GetDeidentifyTemplateRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.DeidentifyTemplate) @@ -2704,6 +2873,11 @@ async def test_get_deidentify_template_async(transport: str = "grpc_asyncio"): assert response.description == "description_value" +@pytest.mark.asyncio +async def test_get_deidentify_template_async_from_dict(): + await test_get_deidentify_template_async(request_type=dict) + + def test_get_deidentify_template_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -2714,7 +2888,7 @@ def test_get_deidentify_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.get_deidentify_template), "__call__" + type(client.transport.get_deidentify_template), "__call__" ) as call: call.return_value = dlp.DeidentifyTemplate() @@ -2741,7 +2915,7 @@ async def test_get_deidentify_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_deidentify_template), "__call__" + type(client.transport.get_deidentify_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.DeidentifyTemplate() @@ -2764,7 +2938,7 @@ def test_get_deidentify_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_deidentify_template), "__call__" + type(client.transport.get_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.DeidentifyTemplate() @@ -2798,7 +2972,7 @@ async def test_get_deidentify_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_deidentify_template), "__call__" + type(client.transport.get_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.DeidentifyTemplate() @@ -2843,7 +3017,7 @@ def test_list_deidentify_templates( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_deidentify_templates), "__call__" + type(client.transport.list_deidentify_templates), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListDeidentifyTemplatesResponse( @@ -2859,6 +3033,7 @@ def test_list_deidentify_templates( assert args[0] == dlp.ListDeidentifyTemplatesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeidentifyTemplatesPager) assert response.next_page_token == "next_page_token_value" @@ -2869,18 +3044,20 @@ def test_list_deidentify_templates_from_dict(): @pytest.mark.asyncio -async def test_list_deidentify_templates_async(transport: str = "grpc_asyncio"): +async def test_list_deidentify_templates_async( + transport: str = "grpc_asyncio", request_type=dlp.ListDeidentifyTemplatesRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.ListDeidentifyTemplatesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_deidentify_templates), "__call__" + type(client.transport.list_deidentify_templates), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -2895,7 +3072,7 @@ async def test_list_deidentify_templates_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.ListDeidentifyTemplatesRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) @@ -2903,6 +3080,11 @@ async def test_list_deidentify_templates_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_from_dict(): + await test_list_deidentify_templates_async(request_type=dict) + + def test_list_deidentify_templates_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -2913,7 +3095,7 @@ def test_list_deidentify_templates_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_deidentify_templates), "__call__" + type(client.transport.list_deidentify_templates), "__call__" ) as call: call.return_value = dlp.ListDeidentifyTemplatesResponse() @@ -2940,7 +3122,7 @@ async def test_list_deidentify_templates_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_deidentify_templates), "__call__" + type(client.transport.list_deidentify_templates), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListDeidentifyTemplatesResponse() @@ -2963,7 +3145,7 @@ def test_list_deidentify_templates_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_deidentify_templates), "__call__" + type(client.transport.list_deidentify_templates), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListDeidentifyTemplatesResponse() @@ -2997,7 +3179,7 @@ async def test_list_deidentify_templates_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_deidentify_templates), "__call__" + type(client.transport.list_deidentify_templates), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListDeidentifyTemplatesResponse() @@ -3034,7 +3216,7 @@ def test_list_deidentify_templates_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_deidentify_templates), "__call__" + type(client.transport.list_deidentify_templates), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -3079,7 +3261,7 @@ def test_list_deidentify_templates_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_deidentify_templates), "__call__" + type(client.transport.list_deidentify_templates), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -3106,8 +3288,8 @@ def test_list_deidentify_templates_pages(): RuntimeError, ) pages = list(client.list_deidentify_templates(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -3116,7 +3298,7 @@ async def test_list_deidentify_templates_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. 
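
# NOTE: the async variants wrap each stubbed return value in
# `grpc_helpers_async.FakeUnaryUnaryCall` (from `google.api_core`), so that
# awaiting the mocked stub yields the response the way a real unary-unary
# gRPC call would:
#
#     call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
#         dlp.ListDeidentifyTemplatesResponse(
#             next_page_token="next_page_token_value",
#         )
#     )
#     response = await client.list_deidentify_templates(request)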
with mock.patch.object( - type(client._client._transport.list_deidentify_templates), + type(client.transport.list_deidentify_templates), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -3160,7 +3342,7 @@ async def test_list_deidentify_templates_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_deidentify_templates), + type(client.transport.list_deidentify_templates), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -3189,10 +3371,10 @@ async def test_list_deidentify_templates_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_deidentify_templates(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_deidentify_templates(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_delete_deidentify_template( @@ -3208,7 +3390,7 @@ def test_delete_deidentify_template( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_deidentify_template), "__call__" + type(client.transport.delete_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -3230,18 +3412,20 @@ def test_delete_deidentify_template_from_dict(): @pytest.mark.asyncio -async def test_delete_deidentify_template_async(transport: str = "grpc_asyncio"): +async def test_delete_deidentify_template_async( + transport: str = "grpc_asyncio", request_type=dlp.DeleteDeidentifyTemplateRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.DeleteDeidentifyTemplateRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_deidentify_template), "__call__" + type(client.transport.delete_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -3252,12 +3436,17 @@ async def test_delete_deidentify_template_async(transport: str = "grpc_asyncio") assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_deidentify_template_async_from_dict(): + await test_delete_deidentify_template_async(request_type=dict) + + def test_delete_deidentify_template_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -3268,7 +3457,7 @@ def test_delete_deidentify_template_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_deidentify_template), "__call__" + type(client.transport.delete_deidentify_template), "__call__" ) as call: call.return_value = None @@ -3295,7 +3484,7 @@ async def test_delete_deidentify_template_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
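
# NOTE: the async pager is consumed in two steps: awaiting the list call
# returns the AsyncPager, and its `.pages` attribute is an async generator,
# hence the `async for`; the `page_` rename matches the sync tests:
#
#     pages = []
#     async for page_ in (await client.list_deidentify_templates(request={})).pages:
#         pages.append(page_)
#     for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
#         assert page_.raw_page.next_page_token == token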
with mock.patch.object( - type(client._client._transport.delete_deidentify_template), "__call__" + type(client.transport.delete_deidentify_template), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -3316,7 +3505,7 @@ def test_delete_deidentify_template_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_deidentify_template), "__call__" + type(client.transport.delete_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -3350,7 +3539,7 @@ async def test_delete_deidentify_template_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_deidentify_template), "__call__" + type(client.transport.delete_deidentify_template), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -3393,7 +3582,7 @@ def test_create_job_trigger( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_job_trigger), "__call__" + type(client.transport.create_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.JobTrigger( @@ -3419,6 +3608,7 @@ def test_create_job_trigger( assert args[0] == dlp.CreateJobTriggerRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) assert response.name == "name_value" @@ -3435,18 +3625,20 @@ def test_create_job_trigger_from_dict(): @pytest.mark.asyncio -async def test_create_job_trigger_async(transport: str = "grpc_asyncio"): +async def test_create_job_trigger_async( + transport: str = "grpc_asyncio", request_type=dlp.CreateJobTriggerRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.CreateJobTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_job_trigger), "__call__" + type(client.transport.create_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -3464,7 +3656,7 @@ async def test_create_job_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.CreateJobTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.JobTrigger) @@ -3478,6 +3670,11 @@ async def test_create_job_trigger_async(transport: str = "grpc_asyncio"): assert response.status == dlp.JobTrigger.Status.HEALTHY +@pytest.mark.asyncio +async def test_create_job_trigger_async_from_dict(): + await test_create_job_trigger_async(request_type=dict) + + def test_create_job_trigger_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -3488,7 +3685,7 @@ def test_create_job_trigger_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.create_job_trigger), "__call__" + type(client.transport.create_job_trigger), "__call__" ) as call: call.return_value = dlp.JobTrigger() @@ -3515,7 +3712,7 @@ async def test_create_job_trigger_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_job_trigger), "__call__" + type(client.transport.create_job_trigger), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) @@ -3536,7 +3733,7 @@ def test_create_job_trigger_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_job_trigger), "__call__" + type(client.transport.create_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.JobTrigger() @@ -3576,7 +3773,7 @@ async def test_create_job_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_job_trigger), "__call__" + type(client.transport.create_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.JobTrigger() @@ -3625,7 +3822,7 @@ def test_update_job_trigger( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_job_trigger), "__call__" + type(client.transport.update_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.JobTrigger( @@ -3651,6 +3848,7 @@ def test_update_job_trigger( assert args[0] == dlp.UpdateJobTriggerRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) assert response.name == "name_value" @@ -3667,18 +3865,20 @@ def test_update_job_trigger_from_dict(): @pytest.mark.asyncio -async def test_update_job_trigger_async(transport: str = "grpc_asyncio"): +async def test_update_job_trigger_async( + transport: str = "grpc_asyncio", request_type=dlp.UpdateJobTriggerRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.UpdateJobTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_job_trigger), "__call__" + type(client.transport.update_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -3696,7 +3896,7 @@ async def test_update_job_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.UpdateJobTriggerRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, dlp.JobTrigger) @@ -3710,6 +3910,11 @@ async def test_update_job_trigger_async(transport: str = "grpc_asyncio"): assert response.status == dlp.JobTrigger.Status.HEALTHY +@pytest.mark.asyncio +async def test_update_job_trigger_async_from_dict(): + await test_update_job_trigger_async(request_type=dict) + + def test_update_job_trigger_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -3720,7 +3925,7 @@ def test_update_job_trigger_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_job_trigger), "__call__" + type(client.transport.update_job_trigger), "__call__" ) as call: call.return_value = dlp.JobTrigger() @@ -3747,7 +3952,7 @@ async def test_update_job_trigger_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_job_trigger), "__call__" + type(client.transport.update_job_trigger), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) @@ -3768,7 +3973,7 @@ def test_update_job_trigger_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_job_trigger), "__call__" + type(client.transport.update_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.JobTrigger() @@ -3813,7 +4018,7 @@ async def test_update_job_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_job_trigger), "__call__" + type(client.transport.update_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.JobTrigger() @@ -3867,7 +4072,7 @@ def test_hybrid_inspect_job_trigger( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.hybrid_inspect_job_trigger), "__call__" + type(client.transport.hybrid_inspect_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.HybridInspectResponse() @@ -3881,6 +4086,7 @@ def test_hybrid_inspect_job_trigger( assert args[0] == dlp.HybridInspectJobTriggerRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) @@ -3889,18 +4095,20 @@ def test_hybrid_inspect_job_trigger_from_dict(): @pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_async(transport: str = "grpc_asyncio"): +async def test_hybrid_inspect_job_trigger_async( + transport: str = "grpc_asyncio", request_type=dlp.HybridInspectJobTriggerRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.HybridInspectJobTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.hybrid_inspect_job_trigger), "__call__" + type(client.transport.hybrid_inspect_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -3913,12 +4121,17 @@ async def test_hybrid_inspect_job_trigger_async(transport: str = "grpc_asyncio") assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.HybridInspectJobTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.HybridInspectResponse) +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_async_from_dict(): + await test_hybrid_inspect_job_trigger_async(request_type=dict) + + def test_hybrid_inspect_job_trigger_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -3929,7 +4142,7 @@ def test_hybrid_inspect_job_trigger_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.hybrid_inspect_job_trigger), "__call__" + type(client.transport.hybrid_inspect_job_trigger), "__call__" ) as call: call.return_value = dlp.HybridInspectResponse() @@ -3956,7 +4169,7 @@ async def test_hybrid_inspect_job_trigger_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.hybrid_inspect_job_trigger), "__call__" + type(client.transport.hybrid_inspect_job_trigger), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.HybridInspectResponse() @@ -3979,7 +4192,7 @@ def test_hybrid_inspect_job_trigger_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.hybrid_inspect_job_trigger), "__call__" + type(client.transport.hybrid_inspect_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.HybridInspectResponse() @@ -4013,7 +4226,7 @@ async def test_hybrid_inspect_job_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.hybrid_inspect_job_trigger), "__call__" + type(client.transport.hybrid_inspect_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.HybridInspectResponse() @@ -4057,7 +4270,7 @@ def test_get_job_trigger( request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_job_trigger), "__call__") as call: + with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.JobTrigger( name="name_value", @@ -4082,6 +4295,7 @@ def test_get_job_trigger( assert args[0] == dlp.GetJobTriggerRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) assert response.name == "name_value" @@ -4098,19 +4312,19 @@ def test_get_job_trigger_from_dict(): @pytest.mark.asyncio -async def test_get_job_trigger_async(transport: str = "grpc_asyncio"): +async def test_get_job_trigger_async( + transport: str = "grpc_asyncio", request_type=dlp.GetJobTriggerRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. 
- request = dlp.GetJobTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_job_trigger), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.JobTrigger( @@ -4127,7 +4341,7 @@ async def test_get_job_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.GetJobTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.JobTrigger) @@ -4141,6 +4355,11 @@ async def test_get_job_trigger_async(transport: str = "grpc_asyncio"): assert response.status == dlp.JobTrigger.Status.HEALTHY +@pytest.mark.asyncio +async def test_get_job_trigger_async_from_dict(): + await test_get_job_trigger_async(request_type=dict) + + def test_get_job_trigger_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -4150,7 +4369,7 @@ def test_get_job_trigger_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_job_trigger), "__call__") as call: + with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: call.return_value = dlp.JobTrigger() client.get_job_trigger(request) @@ -4175,9 +4394,7 @@ async def test_get_job_trigger_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_job_trigger), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) await client.get_job_trigger(request) @@ -4196,7 +4413,7 @@ def test_get_job_trigger_flattened(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_job_trigger), "__call__") as call: + with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.JobTrigger() @@ -4228,9 +4445,7 @@ async def test_get_job_trigger_flattened_async(): client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_job_trigger), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.JobTrigger() @@ -4272,7 +4487,7 @@ def test_list_job_triggers( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_job_triggers), "__call__" + type(client.transport.list_job_triggers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListJobTriggersResponse( @@ -4288,6 +4503,7 @@ def test_list_job_triggers( assert args[0] == dlp.ListJobTriggersRequest() # Establish that the response is the type that we expect. 
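
# NOTE: where the public `client.transport.<method>` path is short enough, the
# previously wrapped `mock.patch.object(...)` call now fits on one line, so
# several of the hunks here are pure reflowing with no behavioral change:
#
#     with mock.patch.object(type(client.transport.get_job_trigger), "__call__") as call:
#         call.return_value = dlp.JobTrigger()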
+ assert isinstance(response, pagers.ListJobTriggersPager) assert response.next_page_token == "next_page_token_value" @@ -4298,18 +4514,20 @@ def test_list_job_triggers_from_dict(): @pytest.mark.asyncio -async def test_list_job_triggers_async(transport: str = "grpc_asyncio"): +async def test_list_job_triggers_async( + transport: str = "grpc_asyncio", request_type=dlp.ListJobTriggersRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.ListJobTriggersRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_job_triggers), "__call__" + type(client.transport.list_job_triggers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -4322,7 +4540,7 @@ async def test_list_job_triggers_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.ListJobTriggersRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListJobTriggersAsyncPager) @@ -4330,6 +4548,11 @@ async def test_list_job_triggers_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_job_triggers_async_from_dict(): + await test_list_job_triggers_async(request_type=dict) + + def test_list_job_triggers_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -4340,7 +4563,7 @@ def test_list_job_triggers_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_job_triggers), "__call__" + type(client.transport.list_job_triggers), "__call__" ) as call: call.return_value = dlp.ListJobTriggersResponse() @@ -4367,7 +4590,7 @@ async def test_list_job_triggers_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_job_triggers), "__call__" + type(client.transport.list_job_triggers), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListJobTriggersResponse() @@ -4390,7 +4613,7 @@ def test_list_job_triggers_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_job_triggers), "__call__" + type(client.transport.list_job_triggers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListJobTriggersResponse() @@ -4424,7 +4647,7 @@ async def test_list_job_triggers_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_job_triggers), "__call__" + type(client.transport.list_job_triggers), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListJobTriggersResponse() @@ -4461,7 +4684,7 @@ def test_list_job_triggers_pager(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.list_job_triggers), "__call__" + type(client.transport.list_job_triggers), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -4497,7 +4720,7 @@ def test_list_job_triggers_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_job_triggers), "__call__" + type(client.transport.list_job_triggers), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -4515,8 +4738,8 @@ def test_list_job_triggers_pages(): RuntimeError, ) pages = list(client.list_job_triggers(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -4525,7 +4748,7 @@ async def test_list_job_triggers_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_job_triggers), + type(client.transport.list_job_triggers), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -4560,7 +4783,7 @@ async def test_list_job_triggers_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_job_triggers), + type(client.transport.list_job_triggers), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -4580,10 +4803,10 @@ async def test_list_job_triggers_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_job_triggers(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_job_triggers(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_delete_job_trigger( @@ -4599,7 +4822,7 @@ def test_delete_job_trigger( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_job_trigger), "__call__" + type(client.transport.delete_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -4621,18 +4844,20 @@ def test_delete_job_trigger_from_dict(): @pytest.mark.asyncio -async def test_delete_job_trigger_async(transport: str = "grpc_asyncio"): +async def test_delete_job_trigger_async( + transport: str = "grpc_asyncio", request_type=dlp.DeleteJobTriggerRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.DeleteJobTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_job_trigger), "__call__" + type(client.transport.delete_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. 
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -4643,12 +4868,17 @@ async def test_delete_job_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.DeleteJobTriggerRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_job_trigger_async_from_dict(): + await test_delete_job_trigger_async(request_type=dict) + + def test_delete_job_trigger_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -4659,7 +4889,7 @@ def test_delete_job_trigger_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_job_trigger), "__call__" + type(client.transport.delete_job_trigger), "__call__" ) as call: call.return_value = None @@ -4686,7 +4916,7 @@ async def test_delete_job_trigger_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_job_trigger), "__call__" + type(client.transport.delete_job_trigger), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -4707,7 +4937,7 @@ def test_delete_job_trigger_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_job_trigger), "__call__" + type(client.transport.delete_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -4741,7 +4971,7 @@ async def test_delete_job_trigger_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_job_trigger), "__call__" + type(client.transport.delete_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -4784,12 +5014,12 @@ def test_activate_job_trigger( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.activate_job_trigger), "__call__" + type(client.transport.activate_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.DlpJob( name="name_value", - type=dlp.DlpJobType.INSPECT_JOB, + type_=dlp.DlpJobType.INSPECT_JOB, state=dlp.DlpJob.JobState.PENDING, job_trigger_name="job_trigger_name_value", risk_details=dlp.AnalyzeDataSourceRiskDetails( @@ -4810,11 +5040,12 @@ def test_activate_job_trigger( assert args[0] == dlp.ActivateJobTriggerRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) assert response.name == "name_value" - assert response.type == dlp.DlpJobType.INSPECT_JOB + assert response.type_ == dlp.DlpJobType.INSPECT_JOB assert response.state == dlp.DlpJob.JobState.PENDING @@ -4826,24 +5057,26 @@ def test_activate_job_trigger_from_dict(): @pytest.mark.asyncio -async def test_activate_job_trigger_async(transport: str = "grpc_asyncio"): +async def test_activate_job_trigger_async( + transport: str = "grpc_asyncio", request_type=dlp.ActivateJobTriggerRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.ActivateJobTriggerRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.activate_job_trigger), "__call__" + type(client.transport.activate_job_trigger), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.DlpJob( name="name_value", - type=dlp.DlpJobType.INSPECT_JOB, + type_=dlp.DlpJobType.INSPECT_JOB, state=dlp.DlpJob.JobState.PENDING, job_trigger_name="job_trigger_name_value", ) @@ -4855,20 +5088,25 @@ async def test_activate_job_trigger_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.ActivateJobTriggerRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.DlpJob) assert response.name == "name_value" - assert response.type == dlp.DlpJobType.INSPECT_JOB + assert response.type_ == dlp.DlpJobType.INSPECT_JOB assert response.state == dlp.DlpJob.JobState.PENDING assert response.job_trigger_name == "job_trigger_name_value" +@pytest.mark.asyncio +async def test_activate_job_trigger_async_from_dict(): + await test_activate_job_trigger_async(request_type=dict) + + def test_activate_job_trigger_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -4879,7 +5117,7 @@ def test_activate_job_trigger_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.activate_job_trigger), "__call__" + type(client.transport.activate_job_trigger), "__call__" ) as call: call.return_value = dlp.DlpJob() @@ -4906,7 +5144,7 @@ async def test_activate_job_trigger_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.activate_job_trigger), "__call__" + type(client.transport.activate_job_trigger), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) @@ -4932,11 +5170,11 @@ def test_create_dlp_job(transport: str = "grpc", request_type=dlp.CreateDlpJobRe request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_dlp_job), "__call__") as call: + with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. 
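
# NOTE: the builtin-collision rename lands in the tests here: `DlpJob.type`
# becomes `DlpJob.type_`, because `type` shadows the Python builtin. Only the
# Python attribute and constructor keyword change; the underlying proto field
# is still named `type`:
#
#     job = dlp.DlpJob(
#         name="name_value",
#         type_=dlp.DlpJobType.INSPECT_JOB,  # was `type=` before this change
#         state=dlp.DlpJob.JobState.PENDING,
#     )
#     assert job.type_ == dlp.DlpJobType.INSPECT_JOB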
call.return_value = dlp.DlpJob( name="name_value", - type=dlp.DlpJobType.INSPECT_JOB, + type_=dlp.DlpJobType.INSPECT_JOB, state=dlp.DlpJob.JobState.PENDING, job_trigger_name="job_trigger_name_value", risk_details=dlp.AnalyzeDataSourceRiskDetails( @@ -4957,11 +5195,12 @@ def test_create_dlp_job(transport: str = "grpc", request_type=dlp.CreateDlpJobRe assert args[0] == dlp.CreateDlpJobRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) assert response.name == "name_value" - assert response.type == dlp.DlpJobType.INSPECT_JOB + assert response.type_ == dlp.DlpJobType.INSPECT_JOB assert response.state == dlp.DlpJob.JobState.PENDING @@ -4973,24 +5212,24 @@ def test_create_dlp_job_from_dict(): @pytest.mark.asyncio -async def test_create_dlp_job_async(transport: str = "grpc_asyncio"): +async def test_create_dlp_job_async( + transport: str = "grpc_asyncio", request_type=dlp.CreateDlpJobRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.CreateDlpJobRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_dlp_job), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.DlpJob( name="name_value", - type=dlp.DlpJobType.INSPECT_JOB, + type_=dlp.DlpJobType.INSPECT_JOB, state=dlp.DlpJob.JobState.PENDING, job_trigger_name="job_trigger_name_value", ) @@ -5002,20 +5241,25 @@ async def test_create_dlp_job_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.CreateDlpJobRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.DlpJob) assert response.name == "name_value" - assert response.type == dlp.DlpJobType.INSPECT_JOB + assert response.type_ == dlp.DlpJobType.INSPECT_JOB assert response.state == dlp.DlpJob.JobState.PENDING assert response.job_trigger_name == "job_trigger_name_value" +@pytest.mark.asyncio +async def test_create_dlp_job_async_from_dict(): + await test_create_dlp_job_async(request_type=dict) + + def test_create_dlp_job_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -5025,7 +5269,7 @@ def test_create_dlp_job_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_dlp_job), "__call__") as call: + with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: call.return_value = dlp.DlpJob() client.create_dlp_job(request) @@ -5050,9 +5294,7 @@ async def test_create_dlp_job_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. 
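
The `request_type` parameter these async tests gain does double duty: the new `*_async_from_dict` variants re-run the same coroutine with `request_type=dict`, relying on the generated client to coerce a plain mapping into the request message. A minimal sketch of that coercion against a mocked transport (the parent value is hypothetical, and no RPC is sent):

    from unittest import mock

    from google.auth import credentials
    from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient
    from google.cloud.dlp_v2.types import dlp

    client = DlpServiceClient(credentials=credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call:
        call.return_value = dlp.DlpJob(name="name_value")
        # A dict is accepted wherever a request message is expected.
        client.create_dlp_job(request={"parent": "projects/my-project"})
        _, args, _ = call.mock_calls[0]
        assert args[0] == dlp.CreateDlpJobRequest(parent="projects/my-project")
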
- with mock.patch.object( - type(client._client._transport.create_dlp_job), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) await client.create_dlp_job(request) @@ -5071,7 +5313,7 @@ def test_create_dlp_job_flattened(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.create_dlp_job), "__call__") as call: + with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.DlpJob() @@ -5142,9 +5384,7 @@ async def test_create_dlp_job_flattened_async(): client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.create_dlp_job), "__call__" - ) as call: + with mock.patch.object(type(client.transport.create_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.DlpJob() @@ -5222,7 +5462,7 @@ def test_list_dlp_jobs(transport: str = "grpc", request_type=dlp.ListDlpJobsRequ request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_dlp_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListDlpJobsResponse( next_page_token="next_page_token_value", @@ -5237,6 +5477,7 @@ def test_list_dlp_jobs(transport: str = "grpc", request_type=dlp.ListDlpJobsRequ assert args[0] == dlp.ListDlpJobsRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDlpJobsPager) assert response.next_page_token == "next_page_token_value" @@ -5247,19 +5488,19 @@ def test_list_dlp_jobs_from_dict(): @pytest.mark.asyncio -async def test_list_dlp_jobs_async(transport: str = "grpc_asyncio"): +async def test_list_dlp_jobs_async( + transport: str = "grpc_asyncio", request_type=dlp.ListDlpJobsRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.ListDlpJobsRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_dlp_jobs), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListDlpJobsResponse(next_page_token="next_page_token_value",) @@ -5271,7 +5512,7 @@ async def test_list_dlp_jobs_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.ListDlpJobsRequest() # Establish that the response is the type that we expect. 
assert isinstance(response, pagers.ListDlpJobsAsyncPager) @@ -5279,6 +5520,11 @@ async def test_list_dlp_jobs_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_from_dict(): + await test_list_dlp_jobs_async(request_type=dict) + + def test_list_dlp_jobs_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -5288,7 +5534,7 @@ def test_list_dlp_jobs_field_headers(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_dlp_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: call.return_value = dlp.ListDlpJobsResponse() client.list_dlp_jobs(request) @@ -5313,9 +5559,7 @@ async def test_list_dlp_jobs_field_headers_async(): request.parent = "parent/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_dlp_jobs), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListDlpJobsResponse() ) @@ -5336,7 +5580,7 @@ def test_list_dlp_jobs_flattened(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_dlp_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListDlpJobsResponse() @@ -5368,9 +5612,7 @@ async def test_list_dlp_jobs_flattened_async(): client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.list_dlp_jobs), "__call__" - ) as call: + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListDlpJobsResponse() @@ -5405,7 +5647,7 @@ def test_list_dlp_jobs_pager(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_dlp_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: # Set the response to a series of pages. call.side_effect = ( dlp.ListDlpJobsResponse( @@ -5434,7 +5676,7 @@ def test_list_dlp_jobs_pages(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.list_dlp_jobs), "__call__") as call: + with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call: # Set the response to a series of pages. 
call.side_effect = ( dlp.ListDlpJobsResponse( @@ -5446,8 +5688,8 @@ def test_list_dlp_jobs_pages(): RuntimeError, ) pages = list(client.list_dlp_jobs(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -5456,9 +5698,7 @@ async def test_list_dlp_jobs_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_dlp_jobs), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_dlp_jobs), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -5486,9 +5726,7 @@ async def test_list_dlp_jobs_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_dlp_jobs), - "__call__", - new_callable=mock.AsyncMock, + type(client.transport.list_dlp_jobs), "__call__", new_callable=mock.AsyncMock ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -5501,10 +5739,10 @@ async def test_list_dlp_jobs_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_dlp_jobs(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_dlp_jobs(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_get_dlp_job(transport: str = "grpc", request_type=dlp.GetDlpJobRequest): @@ -5517,11 +5755,11 @@ def test_get_dlp_job(transport: str = "grpc", request_type=dlp.GetDlpJobRequest) request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_dlp_job), "__call__") as call: + with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.DlpJob( name="name_value", - type=dlp.DlpJobType.INSPECT_JOB, + type_=dlp.DlpJobType.INSPECT_JOB, state=dlp.DlpJob.JobState.PENDING, job_trigger_name="job_trigger_name_value", risk_details=dlp.AnalyzeDataSourceRiskDetails( @@ -5542,11 +5780,12 @@ def test_get_dlp_job(transport: str = "grpc", request_type=dlp.GetDlpJobRequest) assert args[0] == dlp.GetDlpJobRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) assert response.name == "name_value" - assert response.type == dlp.DlpJobType.INSPECT_JOB + assert response.type_ == dlp.DlpJobType.INSPECT_JOB assert response.state == dlp.DlpJob.JobState.PENDING @@ -5558,24 +5797,24 @@ def test_get_dlp_job_from_dict(): @pytest.mark.asyncio -async def test_get_dlp_job_async(transport: str = "grpc_asyncio"): +async def test_get_dlp_job_async( + transport: str = "grpc_asyncio", request_type=dlp.GetDlpJobRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.GetDlpJobRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
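
The paging loops above pick up the same trailing-underscore convention for their loop variable (`page` becomes `page_`). For orientation, a minimal sketch of consuming the pager page by page, with the transport mocked and hypothetical page tokens:

    from unittest import mock

    from google.auth import credentials
    from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient
    from google.cloud.dlp_v2.types import dlp

    client = DlpServiceClient(credentials=credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.list_dlp_jobs), "__call__") as call:
        call.side_effect = (
            dlp.ListDlpJobsResponse(jobs=[dlp.DlpJob()], next_page_token="abc"),
            dlp.ListDlpJobsResponse(jobs=[dlp.DlpJob()], next_page_token=""),
        )
        for page_ in client.list_dlp_jobs(request={}).pages:
            print(page_.raw_page.next_page_token)  # "abc", then ""
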
- with mock.patch.object( - type(client._client._transport.get_dlp_job), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.DlpJob( name="name_value", - type=dlp.DlpJobType.INSPECT_JOB, + type_=dlp.DlpJobType.INSPECT_JOB, state=dlp.DlpJob.JobState.PENDING, job_trigger_name="job_trigger_name_value", ) @@ -5587,20 +5826,25 @@ async def test_get_dlp_job_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.GetDlpJobRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.DlpJob) assert response.name == "name_value" - assert response.type == dlp.DlpJobType.INSPECT_JOB + assert response.type_ == dlp.DlpJobType.INSPECT_JOB assert response.state == dlp.DlpJob.JobState.PENDING assert response.job_trigger_name == "job_trigger_name_value" +@pytest.mark.asyncio +async def test_get_dlp_job_async_from_dict(): + await test_get_dlp_job_async(request_type=dict) + + def test_get_dlp_job_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -5610,7 +5854,7 @@ def test_get_dlp_job_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_dlp_job), "__call__") as call: + with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: call.return_value = dlp.DlpJob() client.get_dlp_job(request) @@ -5635,9 +5879,7 @@ async def test_get_dlp_job_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_dlp_job), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) await client.get_dlp_job(request) @@ -5656,7 +5898,7 @@ def test_get_dlp_job_flattened(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.get_dlp_job), "__call__") as call: + with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.DlpJob() @@ -5688,9 +5930,7 @@ async def test_get_dlp_job_flattened_async(): client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.get_dlp_job), "__call__" - ) as call: + with mock.patch.object(type(client.transport.get_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = dlp.DlpJob() @@ -5729,7 +5969,7 @@ def test_delete_dlp_job(transport: str = "grpc", request_type=dlp.DeleteDlpJobRe request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_dlp_job), "__call__") as call: + with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. 
call.return_value = None @@ -5750,19 +5990,19 @@ def test_delete_dlp_job_from_dict(): @pytest.mark.asyncio -async def test_delete_dlp_job_async(transport: str = "grpc_asyncio"): +async def test_delete_dlp_job_async( + transport: str = "grpc_asyncio", request_type=dlp.DeleteDlpJobRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.DeleteDlpJobRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_dlp_job), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -5772,12 +6012,17 @@ async def test_delete_dlp_job_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.DeleteDlpJobRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_dlp_job_async_from_dict(): + await test_delete_dlp_job_async(request_type=dict) + + def test_delete_dlp_job_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -5787,7 +6032,7 @@ def test_delete_dlp_job_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_dlp_job), "__call__") as call: + with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: call.return_value = None client.delete_dlp_job(request) @@ -5812,9 +6057,7 @@ async def test_delete_dlp_job_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_dlp_job), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.delete_dlp_job(request) @@ -5833,7 +6076,7 @@ def test_delete_dlp_job_flattened(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.delete_dlp_job), "__call__") as call: + with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -5865,9 +6108,7 @@ async def test_delete_dlp_job_flattened_async(): client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.delete_dlp_job), "__call__" - ) as call: + with mock.patch.object(type(client.transport.delete_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -5906,7 +6147,7 @@ def test_cancel_dlp_job(transport: str = "grpc", request_type=dlp.CancelDlpJobRe request = request_type() # Mock the actual call within the gRPC stub, and fake the request. 
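
Every one of these hunks makes the same mechanical substitution: the transport is now reachable as the public `client.transport` property on both client classes, so the old `client._transport` and `client._client._transport` spellings collapse into one patch target. A minimal sketch of the resulting pattern (the job name is hypothetical):

    from unittest import mock

    from google.auth import credentials
    from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient

    client = DlpServiceClient(credentials=credentials.AnonymousCredentials())
    with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call:
        call.return_value = None
        client.cancel_dlp_job(request={"name": "projects/my-project/dlpJobs/my-job"})
        call.assert_called_once()
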
- with mock.patch.object(type(client._transport.cancel_dlp_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -5927,19 +6168,19 @@ def test_cancel_dlp_job_from_dict(): @pytest.mark.asyncio -async def test_cancel_dlp_job_async(transport: str = "grpc_asyncio"): +async def test_cancel_dlp_job_async( + transport: str = "grpc_asyncio", request_type=dlp.CancelDlpJobRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.CancelDlpJobRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.cancel_dlp_job), "__call__" - ) as call: + with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -5949,12 +6190,17 @@ async def test_cancel_dlp_job_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.CancelDlpJobRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_cancel_dlp_job_async_from_dict(): + await test_cancel_dlp_job_async(request_type=dict) + + def test_cancel_dlp_job_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -5964,7 +6210,7 @@ def test_cancel_dlp_job_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.cancel_dlp_job), "__call__") as call: + with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call: call.return_value = None client.cancel_dlp_job(request) @@ -5989,9 +6235,7 @@ async def test_cancel_dlp_job_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.cancel_dlp_job), "__call__" - ) as call: + with mock.patch.object(type(client.transport.cancel_dlp_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.cancel_dlp_job(request) @@ -6019,7 +6263,7 @@ def test_create_stored_info_type( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_stored_info_type), "__call__" + type(client.transport.create_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.StoredInfoType(name="name_value",) @@ -6033,6 +6277,7 @@ def test_create_stored_info_type( assert args[0] == dlp.CreateStoredInfoTypeRequest() # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) assert response.name == "name_value" @@ -6043,18 +6288,20 @@ def test_create_stored_info_type_from_dict(): @pytest.mark.asyncio -async def test_create_stored_info_type_async(transport: str = "grpc_asyncio"): +async def test_create_stored_info_type_async( + transport: str = "grpc_asyncio", request_type=dlp.CreateStoredInfoTypeRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.CreateStoredInfoTypeRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_stored_info_type), "__call__" + type(client.transport.create_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -6067,7 +6314,7 @@ async def test_create_stored_info_type_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.CreateStoredInfoTypeRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.StoredInfoType) @@ -6075,6 +6322,11 @@ async def test_create_stored_info_type_async(transport: str = "grpc_asyncio"): assert response.name == "name_value" +@pytest.mark.asyncio +async def test_create_stored_info_type_async_from_dict(): + await test_create_stored_info_type_async(request_type=dict) + + def test_create_stored_info_type_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -6085,7 +6337,7 @@ def test_create_stored_info_type_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_stored_info_type), "__call__" + type(client.transport.create_stored_info_type), "__call__" ) as call: call.return_value = dlp.StoredInfoType() @@ -6112,7 +6364,7 @@ async def test_create_stored_info_type_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_stored_info_type), "__call__" + type(client.transport.create_stored_info_type), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) @@ -6133,7 +6385,7 @@ def test_create_stored_info_type_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.create_stored_info_type), "__call__" + type(client.transport.create_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.StoredInfoType() @@ -6176,7 +6428,7 @@ async def test_create_stored_info_type_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.create_stored_info_type), "__call__" + type(client.transport.create_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.StoredInfoType() @@ -6228,7 +6480,7 @@ def test_update_stored_info_type( # Mock the actual call within the gRPC stub, and fake the request. 
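
On the async side the mocked stub must hand back something awaitable, which is why each of these tests wraps its value in `grpc_helpers_async.FakeUnaryUnaryCall`. A minimal self-contained sketch of the pattern, assuming pytest with pytest-asyncio installed:

    from unittest import mock

    import pytest
    from google.api_core import grpc_helpers_async
    from google.auth import credentials
    from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient
    from google.cloud.dlp_v2.types import dlp

    @pytest.mark.asyncio
    async def test_awaitable_stub_sketch():
        client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials())
        with mock.patch.object(
            type(client.transport.get_stored_info_type), "__call__"
        ) as call:
            # FakeUnaryUnaryCall wraps a plain message so `await` works.
            call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
                dlp.StoredInfoType(name="name_value")
            )
            response = await client.get_stored_info_type(request={"name": "name_value"})
            assert response.name == "name_value"
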
with mock.patch.object( - type(client._transport.update_stored_info_type), "__call__" + type(client.transport.update_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.StoredInfoType(name="name_value",) @@ -6242,6 +6494,7 @@ def test_update_stored_info_type( assert args[0] == dlp.UpdateStoredInfoTypeRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) assert response.name == "name_value" @@ -6252,18 +6505,20 @@ def test_update_stored_info_type_from_dict(): @pytest.mark.asyncio -async def test_update_stored_info_type_async(transport: str = "grpc_asyncio"): +async def test_update_stored_info_type_async( + transport: str = "grpc_asyncio", request_type=dlp.UpdateStoredInfoTypeRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.UpdateStoredInfoTypeRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_stored_info_type), "__call__" + type(client.transport.update_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -6276,7 +6531,7 @@ async def test_update_stored_info_type_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.UpdateStoredInfoTypeRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.StoredInfoType) @@ -6284,6 +6539,11 @@ async def test_update_stored_info_type_async(transport: str = "grpc_asyncio"): assert response.name == "name_value" +@pytest.mark.asyncio +async def test_update_stored_info_type_async_from_dict(): + await test_update_stored_info_type_async(request_type=dict) + + def test_update_stored_info_type_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -6294,7 +6554,7 @@ def test_update_stored_info_type_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_stored_info_type), "__call__" + type(client.transport.update_stored_info_type), "__call__" ) as call: call.return_value = dlp.StoredInfoType() @@ -6321,7 +6581,7 @@ async def test_update_stored_info_type_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.update_stored_info_type), "__call__" + type(client.transport.update_stored_info_type), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) @@ -6342,7 +6602,7 @@ def test_update_stored_info_type_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.update_stored_info_type), "__call__" + type(client.transport.update_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.StoredInfoType() @@ -6389,7 +6649,7 @@ async def test_update_stored_info_type_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.update_stored_info_type), "__call__" + type(client.transport.update_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.StoredInfoType() @@ -6445,7 +6705,7 @@ def test_get_stored_info_type( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_stored_info_type), "__call__" + type(client.transport.get_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.StoredInfoType(name="name_value",) @@ -6459,6 +6719,7 @@ def test_get_stored_info_type( assert args[0] == dlp.GetStoredInfoTypeRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) assert response.name == "name_value" @@ -6469,18 +6730,20 @@ def test_get_stored_info_type_from_dict(): @pytest.mark.asyncio -async def test_get_stored_info_type_async(transport: str = "grpc_asyncio"): +async def test_get_stored_info_type_async( + transport: str = "grpc_asyncio", request_type=dlp.GetStoredInfoTypeRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.GetStoredInfoTypeRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_stored_info_type), "__call__" + type(client.transport.get_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -6493,7 +6756,7 @@ async def test_get_stored_info_type_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.GetStoredInfoTypeRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.StoredInfoType) @@ -6501,6 +6764,11 @@ async def test_get_stored_info_type_async(transport: str = "grpc_asyncio"): assert response.name == "name_value" +@pytest.mark.asyncio +async def test_get_stored_info_type_async_from_dict(): + await test_get_stored_info_type_async(request_type=dict) + + def test_get_stored_info_type_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -6511,7 +6779,7 @@ def test_get_stored_info_type_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.get_stored_info_type), "__call__" + type(client.transport.get_stored_info_type), "__call__" ) as call: call.return_value = dlp.StoredInfoType() @@ -6538,7 +6806,7 @@ async def test_get_stored_info_type_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_stored_info_type), "__call__" + type(client.transport.get_stored_info_type), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) @@ -6559,7 +6827,7 @@ def test_get_stored_info_type_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.get_stored_info_type), "__call__" + type(client.transport.get_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.StoredInfoType() @@ -6593,7 +6861,7 @@ async def test_get_stored_info_type_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.get_stored_info_type), "__call__" + type(client.transport.get_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.StoredInfoType() @@ -6636,7 +6904,7 @@ def test_list_stored_info_types( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_stored_info_types), "__call__" + type(client.transport.list_stored_info_types), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListStoredInfoTypesResponse( @@ -6652,6 +6920,7 @@ def test_list_stored_info_types( assert args[0] == dlp.ListStoredInfoTypesRequest() # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesPager) assert response.next_page_token == "next_page_token_value" @@ -6662,18 +6931,20 @@ def test_list_stored_info_types_from_dict(): @pytest.mark.asyncio -async def test_list_stored_info_types_async(transport: str = "grpc_asyncio"): +async def test_list_stored_info_types_async( + transport: str = "grpc_asyncio", request_type=dlp.ListStoredInfoTypesRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.ListStoredInfoTypesRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_stored_info_types), "__call__" + type(client.transport.list_stored_info_types), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -6686,7 +6957,7 @@ async def test_list_stored_info_types_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.ListStoredInfoTypesRequest() # Establish that the response is the type that we expect. assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager) @@ -6694,6 +6965,11 @@ async def test_list_stored_info_types_async(transport: str = "grpc_asyncio"): assert response.next_page_token == "next_page_token_value" +@pytest.mark.asyncio +async def test_list_stored_info_types_async_from_dict(): + await test_list_stored_info_types_async(request_type=dict) + + def test_list_stored_info_types_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -6704,7 +6980,7 @@ def test_list_stored_info_types_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. 
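
The `_flattened` tests above exercise the third calling convention: top-level keyword arguments that the client copies into a freshly built request (in these generated clients, mixing a `request` object with flattened arguments is an error). A minimal sketch with a hypothetical resource name:

    from unittest import mock

    from google.auth import credentials
    from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient
    from google.cloud.dlp_v2.types import dlp

    client = DlpServiceClient(credentials=credentials.AnonymousCredentials())
    with mock.patch.object(
        type(client.transport.get_stored_info_type), "__call__"
    ) as call:
        call.return_value = dlp.StoredInfoType()
        client.get_stored_info_type(name="organizations/my-org/storedInfoTypes/my-type")
        _, args, _ = call.mock_calls[0]
        assert args[0].name == "organizations/my-org/storedInfoTypes/my-type"
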
with mock.patch.object( - type(client._transport.list_stored_info_types), "__call__" + type(client.transport.list_stored_info_types), "__call__" ) as call: call.return_value = dlp.ListStoredInfoTypesResponse() @@ -6731,7 +7007,7 @@ async def test_list_stored_info_types_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_stored_info_types), "__call__" + type(client.transport.list_stored_info_types), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.ListStoredInfoTypesResponse() @@ -6754,7 +7030,7 @@ def test_list_stored_info_types_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_stored_info_types), "__call__" + type(client.transport.list_stored_info_types), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListStoredInfoTypesResponse() @@ -6788,7 +7064,7 @@ async def test_list_stored_info_types_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_stored_info_types), "__call__" + type(client.transport.list_stored_info_types), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.ListStoredInfoTypesResponse() @@ -6825,7 +7101,7 @@ def test_list_stored_info_types_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_stored_info_types), "__call__" + type(client.transport.list_stored_info_types), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -6867,7 +7143,7 @@ def test_list_stored_info_types_pages(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.list_stored_info_types), "__call__" + type(client.transport.list_stored_info_types), "__call__" ) as call: # Set the response to a series of pages. call.side_effect = ( @@ -6891,8 +7167,8 @@ def test_list_stored_info_types_pages(): RuntimeError, ) pages = list(client.list_stored_info_types(request={}).pages) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token @pytest.mark.asyncio @@ -6901,7 +7177,7 @@ async def test_list_stored_info_types_async_pager(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.list_stored_info_types), + type(client.transport.list_stored_info_types), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -6942,7 +7218,7 @@ async def test_list_stored_info_types_async_pages(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.list_stored_info_types), + type(client.transport.list_stored_info_types), "__call__", new_callable=mock.AsyncMock, ) as call: @@ -6968,10 +7244,10 @@ async def test_list_stored_info_types_async_pages(): RuntimeError, ) pages = [] - async for page in (await client.list_stored_info_types(request={})).pages: - pages.append(page) - for page, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page.raw_page.next_page_token == token + async for page_ in (await client.list_stored_info_types(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token def test_delete_stored_info_type( @@ -6987,7 +7263,7 @@ def test_delete_stored_info_type( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_stored_info_type), "__call__" + type(client.transport.delete_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -7009,18 +7285,20 @@ def test_delete_stored_info_type_from_dict(): @pytest.mark.asyncio -async def test_delete_stored_info_type_async(transport: str = "grpc_asyncio"): +async def test_delete_stored_info_type_async( + transport: str = "grpc_asyncio", request_type=dlp.DeleteStoredInfoTypeRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.DeleteStoredInfoTypeRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_stored_info_type), "__call__" + type(client.transport.delete_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -7031,12 +7309,17 @@ async def test_delete_stored_info_type_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.DeleteStoredInfoTypeRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_delete_stored_info_type_async_from_dict(): + await test_delete_stored_info_type_async(request_type=dict) + + def test_delete_stored_info_type_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -7047,7 +7330,7 @@ def test_delete_stored_info_type_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.delete_stored_info_type), "__call__" + type(client.transport.delete_stored_info_type), "__call__" ) as call: call.return_value = None @@ -7074,7 +7357,7 @@ async def test_delete_stored_info_type_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_stored_info_type), "__call__" + type(client.transport.delete_stored_info_type), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -7095,7 +7378,7 @@ def test_delete_stored_info_type_flattened(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._transport.delete_stored_info_type), "__call__" + type(client.transport.delete_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -7129,7 +7412,7 @@ async def test_delete_stored_info_type_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.delete_stored_info_type), "__call__" + type(client.transport.delete_stored_info_type), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = None @@ -7172,7 +7455,7 @@ def test_hybrid_inspect_dlp_job( # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.hybrid_inspect_dlp_job), "__call__" + type(client.transport.hybrid_inspect_dlp_job), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.HybridInspectResponse() @@ -7186,6 +7469,7 @@ def test_hybrid_inspect_dlp_job( assert args[0] == dlp.HybridInspectDlpJobRequest() # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) @@ -7194,18 +7478,20 @@ def test_hybrid_inspect_dlp_job_from_dict(): @pytest.mark.asyncio -async def test_hybrid_inspect_dlp_job_async(transport: str = "grpc_asyncio"): +async def test_hybrid_inspect_dlp_job_async( + transport: str = "grpc_asyncio", request_type=dlp.HybridInspectDlpJobRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.HybridInspectDlpJobRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.hybrid_inspect_dlp_job), "__call__" + type(client.transport.hybrid_inspect_dlp_job), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( @@ -7218,12 +7504,17 @@ async def test_hybrid_inspect_dlp_job_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.HybridInspectDlpJobRequest() # Establish that the response is the type that we expect. assert isinstance(response, dlp.HybridInspectResponse) +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_async_from_dict(): + await test_hybrid_inspect_dlp_job_async(request_type=dict) + + def test_hybrid_inspect_dlp_job_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -7234,7 +7525,7 @@ def test_hybrid_inspect_dlp_job_field_headers(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.hybrid_inspect_dlp_job), "__call__" + type(client.transport.hybrid_inspect_dlp_job), "__call__" ) as call: call.return_value = dlp.HybridInspectResponse() @@ -7261,7 +7552,7 @@ async def test_hybrid_inspect_dlp_job_field_headers_async(): # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( - type(client._client._transport.hybrid_inspect_dlp_job), "__call__" + type(client.transport.hybrid_inspect_dlp_job), "__call__" ) as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( dlp.HybridInspectResponse() @@ -7284,7 +7575,7 @@ def test_hybrid_inspect_dlp_job_flattened(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._transport.hybrid_inspect_dlp_job), "__call__" + type(client.transport.hybrid_inspect_dlp_job), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.HybridInspectResponse() @@ -7318,7 +7609,7 @@ async def test_hybrid_inspect_dlp_job_flattened_async(): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( - type(client._client._transport.hybrid_inspect_dlp_job), "__call__" + type(client.transport.hybrid_inspect_dlp_job), "__call__" ) as call: # Designate an appropriate return value for the call. call.return_value = dlp.HybridInspectResponse() @@ -7360,7 +7651,7 @@ def test_finish_dlp_job(transport: str = "grpc", request_type=dlp.FinishDlpJobRe request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client._transport.finish_dlp_job), "__call__") as call: + with mock.patch.object(type(client.transport.finish_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = None @@ -7381,19 +7672,19 @@ def test_finish_dlp_job_from_dict(): @pytest.mark.asyncio -async def test_finish_dlp_job_async(transport: str = "grpc_asyncio"): +async def test_finish_dlp_job_async( + transport: str = "grpc_asyncio", request_type=dlp.FinishDlpJobRequest +): client = DlpServiceAsyncClient( credentials=credentials.AnonymousCredentials(), transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, # and we are mocking out the actual API, so just send an empty request. - request = dlp.FinishDlpJobRequest() + request = request_type() # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.finish_dlp_job), "__call__" - ) as call: + with mock.patch.object(type(client.transport.finish_dlp_job), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) @@ -7403,12 +7694,17 @@ async def test_finish_dlp_job_async(transport: str = "grpc_asyncio"): assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0] == request + assert args[0] == dlp.FinishDlpJobRequest() # Establish that the response is the type that we expect. assert response is None +@pytest.mark.asyncio +async def test_finish_dlp_job_async_from_dict(): + await test_finish_dlp_job_async(request_type=dict) + + def test_finish_dlp_job_field_headers(): client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) @@ -7418,7 +7714,7 @@ def test_finish_dlp_job_field_headers(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client._transport.finish_dlp_job), "__call__") as call: + with mock.patch.object(type(client.transport.finish_dlp_job), "__call__") as call: call.return_value = None client.finish_dlp_job(request) @@ -7443,9 +7739,7 @@ async def test_finish_dlp_job_field_headers_async(): request.name = "name/value" # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client._client._transport.finish_dlp_job), "__call__" - ) as call: + with mock.patch.object(type(client.transport.finish_dlp_job), "__call__") as call: call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) await client.finish_dlp_job(request) @@ -7496,7 +7790,7 @@ def test_transport_instance(): credentials=credentials.AnonymousCredentials(), ) client = DlpServiceClient(transport=transport) - assert client._transport is transport + assert client.transport is transport def test_transport_get_channel(): @@ -7514,10 +7808,22 @@ def test_transport_get_channel(): assert channel +@pytest.mark.parametrize( + "transport_class", + [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) - assert isinstance(client._transport, transports.DlpServiceGrpcTransport,) + assert isinstance(client.transport, transports.DlpServiceGrpcTransport,) def test_dlp_service_base_transport_error(): @@ -7601,6 +7907,17 @@ def test_dlp_service_base_transport_with_credentials_file(): ) +def test_dlp_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(auth, "default") as adc, mock.patch( + "google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + adc.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport() + adc.assert_called_once() + + def test_dlp_service_auth_adc(): # If no credentials are provided, we should use ADC credentials. with mock.patch.object(auth, "default") as adc: @@ -7631,7 +7948,7 @@ def test_dlp_service_host_no_port(): credentials=credentials.AnonymousCredentials(), client_options=client_options.ClientOptions(api_endpoint="dlp.googleapis.com"), ) - assert client._transport._host == "dlp.googleapis.com:443" + assert client.transport._host == "dlp.googleapis.com:443" def test_dlp_service_host_with_port(): @@ -7641,190 +7958,231 @@ def test_dlp_service_host_with_port(): api_endpoint="dlp.googleapis.com:8000" ), ) - assert client._transport._host == "dlp.googleapis.com:8000" + assert client.transport._host == "dlp.googleapis.com:8000" def test_dlp_service_grpc_transport_channel(): channel = grpc.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. 
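
The slimmed-down channel tests that follow reduce to exactly this contract: a caller-supplied channel is used as-is, while the mTLS keyword arguments move into the parametrized tests further down. A minimal sketch of handing a client an explicit channel (the local endpoint is hypothetical, and insecure purely for illustration):

    import grpc

    from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient, transports

    channel = grpc.insecure_channel("localhost:8089")
    transport = transports.DlpServiceGrpcTransport(channel=channel)
    client = DlpServiceClient(transport=transport)
    assert client.transport.grpc_channel is channel
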
transport = transports.DlpServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called + assert transport._ssl_channel_credentials == None def test_dlp_service_grpc_asyncio_transport_channel(): channel = aio.insecure_channel("http://localhost/") - # Check that if channel is provided, mtls endpoint and client_cert_source - # won't be used. - callback = mock.MagicMock() + # Check that channel is used if provided. transport = transports.DlpServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=callback, + host="squid.clam.whelk", channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" - assert not callback.called + assert transport._ssl_channel_credentials == None -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) -def test_dlp_service_grpc_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. - mock_cred = mock.Mock() +@pytest.mark.parametrize( + "transport_class", + [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport], +) +def test_dlp_service_transport_channel_mtls_with_client_cert_source(transport_class): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +@pytest.mark.parametrize( + "transport_class", + [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport], +) +def test_dlp_service_transport_channel_mtls_with_adc(transport_class): mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel", autospec=True + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value 
= mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel - transport = transports.DlpServiceGrpcTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" + +def test_deidentify_template_path(): + organization = "squid" + deidentify_template = "clam" + + expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format( + organization=organization, deidentify_template=deidentify_template, ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), - ssl_credentials=mock_ssl_cred, - quota_project_id=None, + actual = DlpServiceClient.deidentify_template_path( + organization, deidentify_template ) - assert transport.grpc_channel == mock_grpc_channel + assert expected == actual -@mock.patch("grpc.ssl_channel_credentials", autospec=True) -@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) -def test_dlp_service_grpc_asyncio_transport_channel_mtls_with_client_cert_source( - grpc_create_channel, grpc_ssl_channel_cred -): - # Check that if channel is None, but api_mtls_endpoint and client_cert_source - # are provided, then a mTLS channel will be created. - mock_cred = mock.Mock() +def test_parse_deidentify_template_path(): + expected = { + "organization": "whelk", + "deidentify_template": "octopus", + } + path = DlpServiceClient.deidentify_template_path(**expected) - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_deidentify_template_path(path) + assert expected == actual - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - transport = transports.DlpServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" +def test_dlp_content_path(): + project = "oyster" + + expected = "projects/{project}/dlpContent".format(project=project,) + actual = DlpServiceClient.dlp_content_path(project) + assert expected == actual + + +def test_parse_dlp_content_path(): + expected = { + "project": "nudibranch", + } + path = DlpServiceClient.dlp_content_path(**expected) + + # Check that the path construction is reversible. 
+    actual = DlpServiceClient.parse_dlp_content_path(path)
+    assert expected == actual
+
+
+def test_dlp_job_path():
+    project = "cuttlefish"
+    dlp_job = "mussel"
+
+    expected = "projects/{project}/dlpJobs/{dlp_job}".format(
+        project=project, dlp_job=dlp_job,
     )
-    grpc_create_channel.assert_called_once_with(
-        "mtls.squid.clam.whelk:443",
-        credentials=mock_cred,
-        credentials_file=None,
-        scopes=("https://www.googleapis.com/auth/cloud-platform",),
-        ssl_credentials=mock_ssl_cred,
-        quota_project_id=None,
+    actual = DlpServiceClient.dlp_job_path(project, dlp_job)
+    assert expected == actual
+
+
+def test_parse_dlp_job_path():
+    expected = {
+        "project": "winkle",
+        "dlp_job": "nautilus",
+    }
+    path = DlpServiceClient.dlp_job_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DlpServiceClient.parse_dlp_job_path(path)
+    assert expected == actual
+
+
+def test_finding_path():
+    project = "scallop"
+    location = "abalone"
+    finding = "squid"
+
+    expected = "projects/{project}/locations/{location}/findings/{finding}".format(
+        project=project, location=location, finding=finding,
     )
-    assert transport.grpc_channel == mock_grpc_channel
+    actual = DlpServiceClient.finding_path(project, location, finding)
+    assert expected == actual


-@pytest.mark.parametrize(
-    "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
-)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_dlp_service_grpc_transport_channel_mtls_with_adc(
-    grpc_create_channel, api_mtls_endpoint
-):
-    # Check that if channel and client_cert_source are None, but api_mtls_endpoint
-    # is provided, then a mTLS channel will be created with SSL ADC.
-    mock_grpc_channel = mock.Mock()
-    grpc_create_channel.return_value = mock_grpc_channel
+def test_parse_finding_path():
+    expected = {
+        "project": "clam",
+        "location": "whelk",
+        "finding": "octopus",
+    }
+    path = DlpServiceClient.finding_path(**expected)

-    # Mock google.auth.transport.grpc.SslCredentials class.
-    mock_ssl_cred = mock.Mock()
-    with mock.patch.multiple(
-        "google.auth.transport.grpc.SslCredentials",
-        __init__=mock.Mock(return_value=None),
-        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
-    ):
-        mock_cred = mock.Mock()
-        transport = transports.DlpServiceGrpcTransport(
-            host="squid.clam.whelk",
-            credentials=mock_cred,
-            api_mtls_endpoint=api_mtls_endpoint,
-            client_cert_source=None,
-        )
-        grpc_create_channel.assert_called_once_with(
-            "mtls.squid.clam.whelk:443",
-            credentials=mock_cred,
-            credentials_file=None,
-            scopes=("https://www.googleapis.com/auth/cloud-platform",),
-            ssl_credentials=mock_ssl_cred,
-            quota_project_id=None,
-        )
-        assert transport.grpc_channel == mock_grpc_channel
+    # Check that the path construction is reversible.
+    actual = DlpServiceClient.parse_finding_path(path)
+    assert expected == actual


-@pytest.mark.parametrize(
-    "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
-)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_dlp_service_grpc_asyncio_transport_channel_mtls_with_adc(
-    grpc_create_channel, api_mtls_endpoint
-):
-    # Check that if channel and client_cert_source are None, but api_mtls_endpoint
-    # is provided, then a mTLS channel will be created with SSL ADC.
-    mock_grpc_channel = mock.Mock()
-    grpc_create_channel.return_value = mock_grpc_channel
+def test_inspect_template_path():
+    organization = "oyster"
+    inspect_template = "nudibranch"

-    # Mock google.auth.transport.grpc.SslCredentials class.
-    mock_ssl_cred = mock.Mock()
-    with mock.patch.multiple(
-        "google.auth.transport.grpc.SslCredentials",
-        __init__=mock.Mock(return_value=None),
-        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
-    ):
-        mock_cred = mock.Mock()
-        transport = transports.DlpServiceGrpcAsyncIOTransport(
-            host="squid.clam.whelk",
-            credentials=mock_cred,
-            api_mtls_endpoint=api_mtls_endpoint,
-            client_cert_source=None,
-        )
-        grpc_create_channel.assert_called_once_with(
-            "mtls.squid.clam.whelk:443",
-            credentials=mock_cred,
-            credentials_file=None,
-            scopes=("https://www.googleapis.com/auth/cloud-platform",),
-            ssl_credentials=mock_ssl_cred,
-            quota_project_id=None,
-        )
-        assert transport.grpc_channel == mock_grpc_channel
+    expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format(
+        organization=organization, inspect_template=inspect_template,
+    )
+    actual = DlpServiceClient.inspect_template_path(organization, inspect_template)
+    assert expected == actual
+
+
+def test_parse_inspect_template_path():
+    expected = {
+        "organization": "cuttlefish",
+        "inspect_template": "mussel",
+    }
+    path = DlpServiceClient.inspect_template_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DlpServiceClient.parse_inspect_template_path(path)
+    assert expected == actual


 def test_job_trigger_path():
-    project = "squid"
-    job_trigger = "clam"
+    project = "winkle"
+    job_trigger = "nautilus"

     expected = "projects/{project}/jobTriggers/{job_trigger}".format(
         project=project, job_trigger=job_trigger,
@@ -7835,8 +8193,8 @@ def test_job_trigger_path():

 def test_parse_job_trigger_path():
     expected = {
-        "project": "whelk",
-        "job_trigger": "octopus",
+        "project": "scallop",
+        "job_trigger": "abalone",
     }
     path = DlpServiceClient.job_trigger_path(**expected)

@@ -7845,49 +8203,146 @@ def test_parse_job_trigger_path():
     assert expected == actual


-def test_inspect_template_path():
+def test_stored_info_type_path():
     organization = "squid"
-    inspect_template = "clam"
+    stored_info_type = "clam"

-    expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format(
-        organization=organization, inspect_template=inspect_template,
+    expected = "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(
+        organization=organization, stored_info_type=stored_info_type,
     )
-    actual = DlpServiceClient.inspect_template_path(organization, inspect_template)
+    actual = DlpServiceClient.stored_info_type_path(organization, stored_info_type)
     assert expected == actual


-def test_parse_inspect_template_path():
+def test_parse_stored_info_type_path():
     expected = {
         "organization": "whelk",
-        "inspect_template": "octopus",
+        "stored_info_type": "octopus",
     }
-    path = DlpServiceClient.inspect_template_path(**expected)
+    path = DlpServiceClient.stored_info_type_path(**expected)

     # Check that the path construction is reversible.
-    actual = DlpServiceClient.parse_inspect_template_path(path)
+    actual = DlpServiceClient.parse_stored_info_type_path(path)
     assert expected == actual


-def test_deidentify_template_path():
-    organization = "squid"
-    deidentify_template = "clam"
+def test_common_billing_account_path():
+    billing_account = "oyster"

-    expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(
-        organization=organization, deidentify_template=deidentify_template,
+    expected = "billingAccounts/{billing_account}".format(
+        billing_account=billing_account,
     )
-    actual = DlpServiceClient.deidentify_template_path(
-        organization, deidentify_template
+    actual = DlpServiceClient.common_billing_account_path(billing_account)
+    assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+    expected = {
+        "billing_account": "nudibranch",
+    }
+    path = DlpServiceClient.common_billing_account_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DlpServiceClient.parse_common_billing_account_path(path)
+    assert expected == actual
+
+
+def test_common_folder_path():
+    folder = "cuttlefish"
+
+    expected = "folders/{folder}".format(folder=folder,)
+    actual = DlpServiceClient.common_folder_path(folder)
+    assert expected == actual
+
+
+def test_parse_common_folder_path():
+    expected = {
+        "folder": "mussel",
+    }
+    path = DlpServiceClient.common_folder_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DlpServiceClient.parse_common_folder_path(path)
+    assert expected == actual
+
+
+def test_common_organization_path():
+    organization = "winkle"
+
+    expected = "organizations/{organization}".format(organization=organization,)
+    actual = DlpServiceClient.common_organization_path(organization)
+    assert expected == actual
+
+
+def test_parse_common_organization_path():
+    expected = {
+        "organization": "nautilus",
+    }
+    path = DlpServiceClient.common_organization_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DlpServiceClient.parse_common_organization_path(path)
+    assert expected == actual
+
+
+def test_common_project_path():
+    project = "scallop"
+
+    expected = "projects/{project}".format(project=project,)
+    actual = DlpServiceClient.common_project_path(project)
+    assert expected == actual
+
+
+def test_parse_common_project_path():
+    expected = {
+        "project": "abalone",
+    }
+    path = DlpServiceClient.common_project_path(**expected)
+
+    # Check that the path construction is reversible.
+    actual = DlpServiceClient.parse_common_project_path(path)
+    assert expected == actual
+
+
+def test_common_location_path():
+    project = "squid"
+    location = "clam"
+
+    expected = "projects/{project}/locations/{location}".format(
+        project=project, location=location,
     )
+    actual = DlpServiceClient.common_location_path(project, location)
     assert expected == actual


-def test_parse_deidentify_template_path():
+def test_parse_common_location_path():
     expected = {
-        "organization": "whelk",
-        "deidentify_template": "octopus",
+        "project": "whelk",
+        "location": "octopus",
     }
-    path = DlpServiceClient.deidentify_template_path(**expected)
+    path = DlpServiceClient.common_location_path(**expected)

     # Check that the path construction is reversible.
- actual = DlpServiceClient.parse_deidentify_template_path(path) + actual = DlpServiceClient.parse_common_location_path(path) assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DlpServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DlpServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DlpServiceClient.get_transport_class() + transport = transport_class( + credentials=credentials.AnonymousCredentials(), client_info=client_info, + ) + prep.assert_called_once_with(client_info)