diff --git a/.coveragerc b/.coveragerc index dd39c854..f158899b 100644 --- a/.coveragerc +++ b/.coveragerc @@ -21,15 +21,14 @@ branch = True [report] fail_under = 100 show_missing = True +omit = google/cloud/dlp/__init__.py exclude_lines = # Re-enable the standard pragma pragma: NO COVER # Ignore debug-only repr def __repr__ - # Ignore abstract methods - raise NotImplementedError -omit = - */gapic/*.py - */proto/*.py - */core/*.py - */site-packages/*.py \ No newline at end of file + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound \ No newline at end of file diff --git a/README.rst b/README.rst index e3ddff03..91a5d91d 100644 --- a/README.rst +++ b/README.rst @@ -51,11 +51,13 @@ dependencies. Supported Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^ -Python >= 3.5 +Python >= 3.6 Deprecated Python Versions ^^^^^^^^^^^^^^^^^^^^^^^^^^ -Python == 2.7. Python 2.7 support will be removed on January 1, 2020. +Python == 2.7. + +The last version of this library compatible with Python 2.7 is google-cloud-dlp==1.1.0. Mac/Linux diff --git a/UPGRADING.md b/UPGRADING.md new file mode 100644 index 00000000..ab625aa9 --- /dev/null +++ b/UPGRADING.md @@ -0,0 +1,161 @@ +# 2.0.0 Migration Guide + +The 2.0 release of the `google-cloud-dlp` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library will likely require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage. + +If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-dlp/issues). 
+ +## Supported Python Versions + +> **WARNING**: Breaking change + +The 2.0.0 release requires Python 3.6+. + + +## Method Calls + +> **WARNING**: Breaking change + +Methods expect request objects. We provide a script that will convert most common use cases. + +* Install the library + +```py +python3 -m pip install google-cloud-dlp +``` + +* The script `fixup_dlp_v2_keywords.py` is shipped with the library. It expects +an input directory (with the code to convert) and an empty destination directory. + +```sh +$ fixup_dlp_v2_keywords.py --input-directory .samples/ --output-directory samples/ +``` + +**Before:** +```py +from google.cloud import dlp + +client = dlp.DlpServiceClient() + +template = client.get_inspect_template(name="name") +``` + + +**After:** +```py +from google.cloud import dlp + +client = dlp.DlpServiceClient() + +template = client.get_inspect_template(request={"name": "name"}) +``` + +### More Details + +In `google-cloud-dlp<2.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters. + +**Before:** +```py + def create_inspect_template( + self, + parent, + inspect_template, + template_id=None, + location_id=None, + retry=google.api_core.gapic_v1.method.DEFAULT, + timeout=google.api_core.gapic_v1.method.DEFAULT, + metadata=None, + ): +``` + +In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional. + +Some methods have additional keyword only parameters. The available parameters depend on the `google.api.method_signature` annotation specified by the API producer. 
+ + +**After:** +```py + def create_inspect_template( + self, + request: dlp.CreateInspectTemplateRequest = None, + *, + parent: str = None, + inspect_template: dlp.InspectTemplate = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: +``` + +> **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive. +> Passing both will result in an error. + + +Both of these calls are valid: + +```py +response = client.create_inspect_template( + request={ + "parent": parent, + "inspect_template": inspect_template, + } +) +``` + +```py +response = client.create_inspect_template( + parent=parent, + inspect_template=inspect_template, +) +``` + +This call is invalid because it mixes `request` with a keyword argument `inspect_template`. Executing this code will result in an error. + +```py +response = client.create_inspect_template( + request={ + "parent": parent, + }, + inspect_template=inspect_template +) +``` + + + +## Enums and Types + + +> **WARNING**: Breaking change + +The submodules `enums` and `types` have been removed. + +**Before:** +```py +from google.cloud import dlp + +file_type = dlp.enums.FileType.IMAGE +finding = dlp.types.Finding(name="name") +``` + + +**After:** +```py +from google.cloud import dlp + +file_type = dlp.FileType.IMAGE +finding = dlp.Finding(name="name") +``` + +## Path Helper Methods + +The following path helper methods have been removed. Please construct +these paths manually. 
+ +```py +project = 'my-project' +dlp_job = 'dlp-job' +location = 'location' + +project_path = f'projects/{project}' +dlp_job_path = f'projects/{project}/dlpJobs/{dlp_job}' +location_path = f'projects/{project}/locations/{location}' +``` diff --git a/docs/UPGRADING.md b/docs/UPGRADING.md new file mode 120000 index 00000000..01097c8c --- /dev/null +++ b/docs/UPGRADING.md @@ -0,0 +1 @@ +../UPGRADING.md \ No newline at end of file diff --git a/docs/dlp_v2/services.rst b/docs/dlp_v2/services.rst new file mode 100644 index 00000000..42476c6a --- /dev/null +++ b/docs/dlp_v2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Dlp v2 API +==================================== + +.. automodule:: google.cloud.dlp_v2.services.dlp_service + :members: + :inherited-members: diff --git a/docs/dlp_v2/types.rst b/docs/dlp_v2/types.rst new file mode 100644 index 00000000..0ce55310 --- /dev/null +++ b/docs/dlp_v2/types.rst @@ -0,0 +1,5 @@ +Types for Google Cloud Dlp v2 API +================================= + +.. automodule:: google.cloud.dlp_v2.types + :members: diff --git a/docs/gapic/v2/api.rst b/docs/gapic/v2/api.rst deleted file mode 100644 index 2e6e3f98..00000000 --- a/docs/gapic/v2/api.rst +++ /dev/null @@ -1,6 +0,0 @@ -Client for Cloud Data Loss Prevention (DLP) API -=============================================== - -.. automodule:: google.cloud.dlp_v2 - :members: - :inherited-members: \ No newline at end of file diff --git a/docs/gapic/v2/types.rst b/docs/gapic/v2/types.rst deleted file mode 100644 index 3c8e8e79..00000000 --- a/docs/gapic/v2/types.rst +++ /dev/null @@ -1,5 +0,0 @@ -Types for Cloud Data Loss Prevention (DLP) API Client -===================================================== - -.. automodule:: google.cloud.dlp_v2.types - :members: \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst index 87dfbc58..0b5d4512 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -7,10 +7,20 @@ Api Reference .. 
toctree:: :maxdepth: 2 - gapic/v2/api - gapic/v2/types + dlp_v2/services + dlp_v2/types changelog +Migration Guide +--------------- + +See the guide below for instructions on migrating to the 2.x release of this library. + +.. toctree:: + :maxdepth: 2 + + UPGRADING + Changelog --------- diff --git a/google/cloud/dlp.py b/google/cloud/dlp.py deleted file mode 100644 index 8e70272c..00000000 --- a/google/cloud/dlp.py +++ /dev/null @@ -1,25 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import - -from google.cloud.dlp_v2 import DlpServiceClient -from google.cloud.dlp_v2 import enums -from google.cloud.dlp_v2 import types - - -__all__ = ("enums", "types", "DlpServiceClient") diff --git a/google/cloud/dlp/__init__.py b/google/cloud/dlp/__init__.py new file mode 100644 index 00000000..cd1bf675 --- /dev/null +++ b/google/cloud/dlp/__init__.py @@ -0,0 +1,334 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient +from google.cloud.dlp_v2.services.dlp_service.client import DlpServiceClient +from google.cloud.dlp_v2.types.dlp import Action +from google.cloud.dlp_v2.types.dlp import ActivateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import AnalyzeDataSourceRiskDetails +from google.cloud.dlp_v2.types.dlp import BoundingBox +from google.cloud.dlp_v2.types.dlp import BucketingConfig +from google.cloud.dlp_v2.types.dlp import ByteContentItem +from google.cloud.dlp_v2.types.dlp import CancelDlpJobRequest +from google.cloud.dlp_v2.types.dlp import CharacterMaskConfig +from google.cloud.dlp_v2.types.dlp import CharsToIgnore +from google.cloud.dlp_v2.types.dlp import Color +from google.cloud.dlp_v2.types.dlp import Container +from google.cloud.dlp_v2.types.dlp import ContentItem +from google.cloud.dlp_v2.types.dlp import ContentLocation +from google.cloud.dlp_v2.types.dlp import ContentOption +from google.cloud.dlp_v2.types.dlp import CreateDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import CreateDlpJobRequest +from google.cloud.dlp_v2.types.dlp import CreateInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import CreateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import CreateStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import CryptoDeterministicConfig +from google.cloud.dlp_v2.types.dlp import CryptoHashConfig +from google.cloud.dlp_v2.types.dlp import CryptoKey +from google.cloud.dlp_v2.types.dlp import CryptoReplaceFfxFpeConfig +from google.cloud.dlp_v2.types.dlp import DateShiftConfig +from google.cloud.dlp_v2.types.dlp import DateTime +from google.cloud.dlp_v2.types.dlp import DeidentifyConfig +from google.cloud.dlp_v2.types.dlp import DeidentifyContentRequest +from google.cloud.dlp_v2.types.dlp import 
DeidentifyContentResponse +from google.cloud.dlp_v2.types.dlp import DeidentifyTemplate +from google.cloud.dlp_v2.types.dlp import DeleteDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import DeleteDlpJobRequest +from google.cloud.dlp_v2.types.dlp import DeleteInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import DeleteJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import DeleteStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import DlpJob +from google.cloud.dlp_v2.types.dlp import DlpJobType +from google.cloud.dlp_v2.types.dlp import DocumentLocation +from google.cloud.dlp_v2.types.dlp import Error +from google.cloud.dlp_v2.types.dlp import ExcludeInfoTypes +from google.cloud.dlp_v2.types.dlp import ExclusionRule +from google.cloud.dlp_v2.types.dlp import FieldTransformation +from google.cloud.dlp_v2.types.dlp import Finding +from google.cloud.dlp_v2.types.dlp import FinishDlpJobRequest +from google.cloud.dlp_v2.types.dlp import FixedSizeBucketingConfig +from google.cloud.dlp_v2.types.dlp import GetDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import GetDlpJobRequest +from google.cloud.dlp_v2.types.dlp import GetInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import GetJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import GetStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import HybridContentItem +from google.cloud.dlp_v2.types.dlp import HybridFindingDetails +from google.cloud.dlp_v2.types.dlp import HybridInspectDlpJobRequest +from google.cloud.dlp_v2.types.dlp import HybridInspectJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import HybridInspectResponse +from google.cloud.dlp_v2.types.dlp import HybridInspectStatistics +from google.cloud.dlp_v2.types.dlp import ImageLocation +from google.cloud.dlp_v2.types.dlp import InfoTypeDescription +from google.cloud.dlp_v2.types.dlp import InfoTypeStats +from google.cloud.dlp_v2.types.dlp import InfoTypeSupportedBy +from 
google.cloud.dlp_v2.types.dlp import InfoTypeTransformations +from google.cloud.dlp_v2.types.dlp import InspectConfig +from google.cloud.dlp_v2.types.dlp import InspectContentRequest +from google.cloud.dlp_v2.types.dlp import InspectContentResponse +from google.cloud.dlp_v2.types.dlp import InspectDataSourceDetails +from google.cloud.dlp_v2.types.dlp import InspectJobConfig +from google.cloud.dlp_v2.types.dlp import InspectResult +from google.cloud.dlp_v2.types.dlp import InspectTemplate +from google.cloud.dlp_v2.types.dlp import InspectionRule +from google.cloud.dlp_v2.types.dlp import InspectionRuleSet +from google.cloud.dlp_v2.types.dlp import JobTrigger +from google.cloud.dlp_v2.types.dlp import KmsWrappedCryptoKey +from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryConfig +from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryStats +from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesRequest +from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesResponse +from google.cloud.dlp_v2.types.dlp import ListDlpJobsRequest +from google.cloud.dlp_v2.types.dlp import ListDlpJobsResponse +from google.cloud.dlp_v2.types.dlp import ListInfoTypesRequest +from google.cloud.dlp_v2.types.dlp import ListInfoTypesResponse +from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesRequest +from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesResponse +from google.cloud.dlp_v2.types.dlp import ListJobTriggersRequest +from google.cloud.dlp_v2.types.dlp import ListJobTriggersResponse +from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesRequest +from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesResponse +from google.cloud.dlp_v2.types.dlp import Location +from google.cloud.dlp_v2.types.dlp import Manual +from google.cloud.dlp_v2.types.dlp import MatchingType +from google.cloud.dlp_v2.types.dlp import MetadataLocation +from google.cloud.dlp_v2.types.dlp import MetadataType +from google.cloud.dlp_v2.types.dlp 
import OutputStorageConfig +from google.cloud.dlp_v2.types.dlp import PrimitiveTransformation +from google.cloud.dlp_v2.types.dlp import PrivacyMetric +from google.cloud.dlp_v2.types.dlp import QuasiId +from google.cloud.dlp_v2.types.dlp import QuoteInfo +from google.cloud.dlp_v2.types.dlp import Range +from google.cloud.dlp_v2.types.dlp import RecordCondition +from google.cloud.dlp_v2.types.dlp import RecordLocation +from google.cloud.dlp_v2.types.dlp import RecordSuppression +from google.cloud.dlp_v2.types.dlp import RecordTransformations +from google.cloud.dlp_v2.types.dlp import RedactConfig +from google.cloud.dlp_v2.types.dlp import RedactImageRequest +from google.cloud.dlp_v2.types.dlp import RedactImageResponse +from google.cloud.dlp_v2.types.dlp import ReidentifyContentRequest +from google.cloud.dlp_v2.types.dlp import ReidentifyContentResponse +from google.cloud.dlp_v2.types.dlp import RelationalOperator +from google.cloud.dlp_v2.types.dlp import ReplaceValueConfig +from google.cloud.dlp_v2.types.dlp import ReplaceWithInfoTypeConfig +from google.cloud.dlp_v2.types.dlp import RiskAnalysisJobConfig +from google.cloud.dlp_v2.types.dlp import Schedule +from google.cloud.dlp_v2.types.dlp import StatisticalTable +from google.cloud.dlp_v2.types.dlp import StorageMetadataLabel +from google.cloud.dlp_v2.types.dlp import StoredInfoType +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeConfig +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeState +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeStats +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeVersion +from google.cloud.dlp_v2.types.dlp import Table +from google.cloud.dlp_v2.types.dlp import TableLocation +from google.cloud.dlp_v2.types.dlp import TimePartConfig +from google.cloud.dlp_v2.types.dlp import TransformationErrorHandling +from google.cloud.dlp_v2.types.dlp import TransformationOverview +from google.cloud.dlp_v2.types.dlp import TransformationSummary +from 
google.cloud.dlp_v2.types.dlp import TransientCryptoKey +from google.cloud.dlp_v2.types.dlp import UnwrappedCryptoKey +from google.cloud.dlp_v2.types.dlp import UpdateDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import UpdateInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import UpdateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import UpdateStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import Value +from google.cloud.dlp_v2.types.dlp import ValueFrequency +from google.cloud.dlp_v2.types.storage import BigQueryField +from google.cloud.dlp_v2.types.storage import BigQueryKey +from google.cloud.dlp_v2.types.storage import BigQueryOptions +from google.cloud.dlp_v2.types.storage import BigQueryTable +from google.cloud.dlp_v2.types.storage import CloudStorageFileSet +from google.cloud.dlp_v2.types.storage import CloudStorageOptions +from google.cloud.dlp_v2.types.storage import CloudStoragePath +from google.cloud.dlp_v2.types.storage import CloudStorageRegexFileSet +from google.cloud.dlp_v2.types.storage import CustomInfoType +from google.cloud.dlp_v2.types.storage import DatastoreKey +from google.cloud.dlp_v2.types.storage import DatastoreOptions +from google.cloud.dlp_v2.types.storage import EntityId +from google.cloud.dlp_v2.types.storage import FieldId +from google.cloud.dlp_v2.types.storage import FileType +from google.cloud.dlp_v2.types.storage import HybridOptions +from google.cloud.dlp_v2.types.storage import InfoType +from google.cloud.dlp_v2.types.storage import Key +from google.cloud.dlp_v2.types.storage import KindExpression +from google.cloud.dlp_v2.types.storage import Likelihood +from google.cloud.dlp_v2.types.storage import PartitionId +from google.cloud.dlp_v2.types.storage import RecordKey +from google.cloud.dlp_v2.types.storage import StorageConfig +from google.cloud.dlp_v2.types.storage import StoredType +from google.cloud.dlp_v2.types.storage import TableOptions + +__all__ = ( + "Action", + 
"ActivateJobTriggerRequest", + "AnalyzeDataSourceRiskDetails", + "BigQueryField", + "BigQueryKey", + "BigQueryOptions", + "BigQueryTable", + "BoundingBox", + "BucketingConfig", + "ByteContentItem", + "CancelDlpJobRequest", + "CharacterMaskConfig", + "CharsToIgnore", + "CloudStorageFileSet", + "CloudStorageOptions", + "CloudStoragePath", + "CloudStorageRegexFileSet", + "Color", + "Container", + "ContentItem", + "ContentLocation", + "ContentOption", + "CreateDeidentifyTemplateRequest", + "CreateDlpJobRequest", + "CreateInspectTemplateRequest", + "CreateJobTriggerRequest", + "CreateStoredInfoTypeRequest", + "CryptoDeterministicConfig", + "CryptoHashConfig", + "CryptoKey", + "CryptoReplaceFfxFpeConfig", + "CustomInfoType", + "DatastoreKey", + "DatastoreOptions", + "DateShiftConfig", + "DateTime", + "DeidentifyConfig", + "DeidentifyContentRequest", + "DeidentifyContentResponse", + "DeidentifyTemplate", + "DeleteDeidentifyTemplateRequest", + "DeleteDlpJobRequest", + "DeleteInspectTemplateRequest", + "DeleteJobTriggerRequest", + "DeleteStoredInfoTypeRequest", + "DlpJob", + "DlpJobType", + "DlpServiceAsyncClient", + "DlpServiceClient", + "DocumentLocation", + "EntityId", + "Error", + "ExcludeInfoTypes", + "ExclusionRule", + "FieldId", + "FieldTransformation", + "FileType", + "Finding", + "FinishDlpJobRequest", + "FixedSizeBucketingConfig", + "GetDeidentifyTemplateRequest", + "GetDlpJobRequest", + "GetInspectTemplateRequest", + "GetJobTriggerRequest", + "GetStoredInfoTypeRequest", + "HybridContentItem", + "HybridFindingDetails", + "HybridInspectDlpJobRequest", + "HybridInspectJobTriggerRequest", + "HybridInspectResponse", + "HybridInspectStatistics", + "HybridOptions", + "ImageLocation", + "InfoType", + "InfoTypeDescription", + "InfoTypeStats", + "InfoTypeSupportedBy", + "InfoTypeTransformations", + "InspectConfig", + "InspectContentRequest", + "InspectContentResponse", + "InspectDataSourceDetails", + "InspectJobConfig", + "InspectResult", + "InspectTemplate", + 
"InspectionRule", + "InspectionRuleSet", + "JobTrigger", + "Key", + "KindExpression", + "KmsWrappedCryptoKey", + "LargeCustomDictionaryConfig", + "LargeCustomDictionaryStats", + "Likelihood", + "ListDeidentifyTemplatesRequest", + "ListDeidentifyTemplatesResponse", + "ListDlpJobsRequest", + "ListDlpJobsResponse", + "ListInfoTypesRequest", + "ListInfoTypesResponse", + "ListInspectTemplatesRequest", + "ListInspectTemplatesResponse", + "ListJobTriggersRequest", + "ListJobTriggersResponse", + "ListStoredInfoTypesRequest", + "ListStoredInfoTypesResponse", + "Location", + "Manual", + "MatchingType", + "MetadataLocation", + "MetadataType", + "OutputStorageConfig", + "PartitionId", + "PrimitiveTransformation", + "PrivacyMetric", + "QuasiId", + "QuoteInfo", + "Range", + "RecordCondition", + "RecordKey", + "RecordLocation", + "RecordSuppression", + "RecordTransformations", + "RedactConfig", + "RedactImageRequest", + "RedactImageResponse", + "ReidentifyContentRequest", + "ReidentifyContentResponse", + "RelationalOperator", + "ReplaceValueConfig", + "ReplaceWithInfoTypeConfig", + "RiskAnalysisJobConfig", + "Schedule", + "StatisticalTable", + "StorageConfig", + "StorageMetadataLabel", + "StoredInfoType", + "StoredInfoTypeConfig", + "StoredInfoTypeState", + "StoredInfoTypeStats", + "StoredInfoTypeVersion", + "StoredType", + "Table", + "TableLocation", + "TableOptions", + "TimePartConfig", + "TransformationErrorHandling", + "TransformationOverview", + "TransformationSummary", + "TransientCryptoKey", + "UnwrappedCryptoKey", + "UpdateDeidentifyTemplateRequest", + "UpdateInspectTemplateRequest", + "UpdateJobTriggerRequest", + "UpdateStoredInfoTypeRequest", + "Value", + "ValueFrequency", +) diff --git a/google/cloud/dlp/py.typed b/google/cloud/dlp/py.typed new file mode 100644 index 00000000..23d89ef3 --- /dev/null +++ b/google/cloud/dlp/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. 
diff --git a/google/cloud/dlp_v2/__init__.py b/google/cloud/dlp_v2/__init__.py index 48ef10a6..73bacd1a 100644 --- a/google/cloud/dlp_v2/__init__.py +++ b/google/cloud/dlp_v2/__init__.py @@ -1,41 +1,333 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# - -from __future__ import absolute_import -import sys -import warnings - -from google.cloud.dlp_v2 import types -from google.cloud.dlp_v2.gapic import dlp_service_client -from google.cloud.dlp_v2.gapic import enums - - -if sys.version_info[:2] == (2, 7): - message = ( - "A future version of this library will drop support for Python 2.7." 
- "More details about Python 2 support for Google Cloud Client Libraries" - "can be found at https://cloud.google.com/python/docs/python2-sunset/" - ) - warnings.warn(message, DeprecationWarning) - - -class DlpServiceClient(dlp_service_client.DlpServiceClient): - __doc__ = dlp_service_client.DlpServiceClient.__doc__ - enums = enums +from .services.dlp_service import DlpServiceClient +from .types.dlp import Action +from .types.dlp import ActivateJobTriggerRequest +from .types.dlp import AnalyzeDataSourceRiskDetails +from .types.dlp import BoundingBox +from .types.dlp import BucketingConfig +from .types.dlp import ByteContentItem +from .types.dlp import CancelDlpJobRequest +from .types.dlp import CharacterMaskConfig +from .types.dlp import CharsToIgnore +from .types.dlp import Color +from .types.dlp import Container +from .types.dlp import ContentItem +from .types.dlp import ContentLocation +from .types.dlp import ContentOption +from .types.dlp import CreateDeidentifyTemplateRequest +from .types.dlp import CreateDlpJobRequest +from .types.dlp import CreateInspectTemplateRequest +from .types.dlp import CreateJobTriggerRequest +from .types.dlp import CreateStoredInfoTypeRequest +from .types.dlp import CryptoDeterministicConfig +from .types.dlp import CryptoHashConfig +from .types.dlp import CryptoKey +from .types.dlp import CryptoReplaceFfxFpeConfig +from .types.dlp import DateShiftConfig +from .types.dlp import DateTime +from .types.dlp import DeidentifyConfig +from .types.dlp import DeidentifyContentRequest +from .types.dlp import DeidentifyContentResponse +from .types.dlp import DeidentifyTemplate +from .types.dlp import DeleteDeidentifyTemplateRequest +from .types.dlp import DeleteDlpJobRequest +from .types.dlp import DeleteInspectTemplateRequest +from .types.dlp import DeleteJobTriggerRequest +from .types.dlp import DeleteStoredInfoTypeRequest +from .types.dlp import DlpJob +from .types.dlp import DlpJobType +from .types.dlp import DocumentLocation +from 
.types.dlp import Error +from .types.dlp import ExcludeInfoTypes +from .types.dlp import ExclusionRule +from .types.dlp import FieldTransformation +from .types.dlp import Finding +from .types.dlp import FinishDlpJobRequest +from .types.dlp import FixedSizeBucketingConfig +from .types.dlp import GetDeidentifyTemplateRequest +from .types.dlp import GetDlpJobRequest +from .types.dlp import GetInspectTemplateRequest +from .types.dlp import GetJobTriggerRequest +from .types.dlp import GetStoredInfoTypeRequest +from .types.dlp import HybridContentItem +from .types.dlp import HybridFindingDetails +from .types.dlp import HybridInspectDlpJobRequest +from .types.dlp import HybridInspectJobTriggerRequest +from .types.dlp import HybridInspectResponse +from .types.dlp import HybridInspectStatistics +from .types.dlp import ImageLocation +from .types.dlp import InfoTypeDescription +from .types.dlp import InfoTypeStats +from .types.dlp import InfoTypeSupportedBy +from .types.dlp import InfoTypeTransformations +from .types.dlp import InspectConfig +from .types.dlp import InspectContentRequest +from .types.dlp import InspectContentResponse +from .types.dlp import InspectDataSourceDetails +from .types.dlp import InspectJobConfig +from .types.dlp import InspectResult +from .types.dlp import InspectTemplate +from .types.dlp import InspectionRule +from .types.dlp import InspectionRuleSet +from .types.dlp import JobTrigger +from .types.dlp import KmsWrappedCryptoKey +from .types.dlp import LargeCustomDictionaryConfig +from .types.dlp import LargeCustomDictionaryStats +from .types.dlp import ListDeidentifyTemplatesRequest +from .types.dlp import ListDeidentifyTemplatesResponse +from .types.dlp import ListDlpJobsRequest +from .types.dlp import ListDlpJobsResponse +from .types.dlp import ListInfoTypesRequest +from .types.dlp import ListInfoTypesResponse +from .types.dlp import ListInspectTemplatesRequest +from .types.dlp import ListInspectTemplatesResponse +from .types.dlp import 
ListJobTriggersRequest +from .types.dlp import ListJobTriggersResponse +from .types.dlp import ListStoredInfoTypesRequest +from .types.dlp import ListStoredInfoTypesResponse +from .types.dlp import Location +from .types.dlp import Manual +from .types.dlp import MatchingType +from .types.dlp import MetadataLocation +from .types.dlp import MetadataType +from .types.dlp import OutputStorageConfig +from .types.dlp import PrimitiveTransformation +from .types.dlp import PrivacyMetric +from .types.dlp import QuasiId +from .types.dlp import QuoteInfo +from .types.dlp import Range +from .types.dlp import RecordCondition +from .types.dlp import RecordLocation +from .types.dlp import RecordSuppression +from .types.dlp import RecordTransformations +from .types.dlp import RedactConfig +from .types.dlp import RedactImageRequest +from .types.dlp import RedactImageResponse +from .types.dlp import ReidentifyContentRequest +from .types.dlp import ReidentifyContentResponse +from .types.dlp import RelationalOperator +from .types.dlp import ReplaceValueConfig +from .types.dlp import ReplaceWithInfoTypeConfig +from .types.dlp import RiskAnalysisJobConfig +from .types.dlp import Schedule +from .types.dlp import StatisticalTable +from .types.dlp import StorageMetadataLabel +from .types.dlp import StoredInfoType +from .types.dlp import StoredInfoTypeConfig +from .types.dlp import StoredInfoTypeState +from .types.dlp import StoredInfoTypeStats +from .types.dlp import StoredInfoTypeVersion +from .types.dlp import Table +from .types.dlp import TableLocation +from .types.dlp import TimePartConfig +from .types.dlp import TransformationErrorHandling +from .types.dlp import TransformationOverview +from .types.dlp import TransformationSummary +from .types.dlp import TransientCryptoKey +from .types.dlp import UnwrappedCryptoKey +from .types.dlp import UpdateDeidentifyTemplateRequest +from .types.dlp import UpdateInspectTemplateRequest +from .types.dlp import UpdateJobTriggerRequest +from .types.dlp 
import UpdateStoredInfoTypeRequest +from .types.dlp import Value +from .types.dlp import ValueFrequency +from .types.storage import BigQueryField +from .types.storage import BigQueryKey +from .types.storage import BigQueryOptions +from .types.storage import BigQueryTable +from .types.storage import CloudStorageFileSet +from .types.storage import CloudStorageOptions +from .types.storage import CloudStoragePath +from .types.storage import CloudStorageRegexFileSet +from .types.storage import CustomInfoType +from .types.storage import DatastoreKey +from .types.storage import DatastoreOptions +from .types.storage import EntityId +from .types.storage import FieldId +from .types.storage import FileType +from .types.storage import HybridOptions +from .types.storage import InfoType +from .types.storage import Key +from .types.storage import KindExpression +from .types.storage import Likelihood +from .types.storage import PartitionId +from .types.storage import RecordKey +from .types.storage import StorageConfig +from .types.storage import StoredType +from .types.storage import TableOptions -__all__ = ("enums", "types", "DlpServiceClient") +__all__ = ( + "Action", + "ActivateJobTriggerRequest", + "AnalyzeDataSourceRiskDetails", + "BigQueryField", + "BigQueryKey", + "BigQueryOptions", + "BigQueryTable", + "BoundingBox", + "BucketingConfig", + "ByteContentItem", + "CancelDlpJobRequest", + "CharacterMaskConfig", + "CharsToIgnore", + "CloudStorageFileSet", + "CloudStorageOptions", + "CloudStoragePath", + "CloudStorageRegexFileSet", + "Color", + "Container", + "ContentItem", + "ContentLocation", + "ContentOption", + "CreateDeidentifyTemplateRequest", + "CreateDlpJobRequest", + "CreateInspectTemplateRequest", + "CreateJobTriggerRequest", + "CreateStoredInfoTypeRequest", + "CryptoDeterministicConfig", + "CryptoHashConfig", + "CryptoKey", + "CryptoReplaceFfxFpeConfig", + "CustomInfoType", + "DatastoreKey", + "DatastoreOptions", + "DateShiftConfig", + "DateTime", + 
"DeidentifyConfig", + "DeidentifyContentRequest", + "DeidentifyContentResponse", + "DeidentifyTemplate", + "DeleteDeidentifyTemplateRequest", + "DeleteDlpJobRequest", + "DeleteInspectTemplateRequest", + "DeleteJobTriggerRequest", + "DeleteStoredInfoTypeRequest", + "DlpJob", + "DlpJobType", + "DocumentLocation", + "EntityId", + "Error", + "ExcludeInfoTypes", + "ExclusionRule", + "FieldId", + "FieldTransformation", + "FileType", + "Finding", + "FinishDlpJobRequest", + "FixedSizeBucketingConfig", + "GetDeidentifyTemplateRequest", + "GetDlpJobRequest", + "GetInspectTemplateRequest", + "GetJobTriggerRequest", + "GetStoredInfoTypeRequest", + "HybridContentItem", + "HybridFindingDetails", + "HybridInspectDlpJobRequest", + "HybridInspectJobTriggerRequest", + "HybridInspectResponse", + "HybridInspectStatistics", + "HybridOptions", + "ImageLocation", + "InfoType", + "InfoTypeDescription", + "InfoTypeStats", + "InfoTypeSupportedBy", + "InfoTypeTransformations", + "InspectConfig", + "InspectContentRequest", + "InspectContentResponse", + "InspectDataSourceDetails", + "InspectJobConfig", + "InspectResult", + "InspectTemplate", + "InspectionRule", + "InspectionRuleSet", + "JobTrigger", + "Key", + "KindExpression", + "KmsWrappedCryptoKey", + "LargeCustomDictionaryConfig", + "LargeCustomDictionaryStats", + "Likelihood", + "ListDeidentifyTemplatesRequest", + "ListDeidentifyTemplatesResponse", + "ListDlpJobsRequest", + "ListDlpJobsResponse", + "ListInfoTypesRequest", + "ListInfoTypesResponse", + "ListInspectTemplatesRequest", + "ListInspectTemplatesResponse", + "ListJobTriggersRequest", + "ListJobTriggersResponse", + "ListStoredInfoTypesRequest", + "ListStoredInfoTypesResponse", + "Location", + "Manual", + "MatchingType", + "MetadataLocation", + "MetadataType", + "OutputStorageConfig", + "PartitionId", + "PrimitiveTransformation", + "PrivacyMetric", + "QuasiId", + "QuoteInfo", + "Range", + "RecordCondition", + "RecordKey", + "RecordLocation", + "RecordSuppression", + 
"RecordTransformations", + "RedactConfig", + "RedactImageRequest", + "RedactImageResponse", + "ReidentifyContentRequest", + "ReidentifyContentResponse", + "RelationalOperator", + "ReplaceValueConfig", + "ReplaceWithInfoTypeConfig", + "RiskAnalysisJobConfig", + "Schedule", + "StatisticalTable", + "StorageConfig", + "StorageMetadataLabel", + "StoredInfoType", + "StoredInfoTypeConfig", + "StoredInfoTypeState", + "StoredInfoTypeStats", + "StoredInfoTypeVersion", + "StoredType", + "Table", + "TableLocation", + "TableOptions", + "TimePartConfig", + "TransformationErrorHandling", + "TransformationOverview", + "TransformationSummary", + "TransientCryptoKey", + "UnwrappedCryptoKey", + "UpdateDeidentifyTemplateRequest", + "UpdateInspectTemplateRequest", + "UpdateJobTriggerRequest", + "UpdateStoredInfoTypeRequest", + "Value", + "ValueFrequency", + "DlpServiceClient", +) diff --git a/google/cloud/dlp_v2/gapic/__init__.py b/google/cloud/dlp_v2/gapic/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/dlp_v2/gapic/dlp_service_client.py b/google/cloud/dlp_v2/gapic/dlp_service_client.py deleted file mode 100644 index 8ab996e7..00000000 --- a/google/cloud/dlp_v2/gapic/dlp_service_client.py +++ /dev/null @@ -1,3397 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.privacy.dlp.v2 DlpService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import google.api_core.protobuf_helpers -import grpc - -from google.cloud.dlp_v2.gapic import dlp_service_client_config -from google.cloud.dlp_v2.gapic import enums -from google.cloud.dlp_v2.gapic.transports import dlp_service_grpc_transport -from google.cloud.dlp_v2.proto import dlp_pb2 -from google.cloud.dlp_v2.proto import dlp_pb2_grpc -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution("google-cloud-dlp").version - - -class DlpServiceClient(object): - """ - The Cloud Data Loss Prevention (DLP) API is a service that allows clients - to detect the presence of Personally Identifiable Information (PII) and other - privacy-sensitive data in user-supplied, unstructured data streams, like text - blocks or images. - The service also includes methods for sensitive data redaction and - scheduling of data scans on Google Cloud Platform based data sets. - - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - """ - - SERVICE_ADDRESS = "dlp.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.privacy.dlp.v2.DlpService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. 
- - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def deidentify_template_path(cls, organization, deidentify_template): - """Return a fully-qualified deidentify_template string.""" - return google.api_core.path_template.expand( - "organizations/{organization}/deidentifyTemplates/{deidentify_template}", - organization=organization, - deidentify_template=deidentify_template, - ) - - @classmethod - def dlp_job_path(cls, project, dlp_job): - """Return a fully-qualified dlp_job string.""" - return google.api_core.path_template.expand( - "projects/{project}/dlpJobs/{dlp_job}", project=project, dlp_job=dlp_job - ) - - @classmethod - def inspect_template_path(cls, organization, inspect_template): - """Return a fully-qualified inspect_template string.""" - return google.api_core.path_template.expand( - "organizations/{organization}/inspectTemplates/{inspect_template}", - organization=organization, - inspect_template=inspect_template, - ) - - @classmethod - def job_trigger_path(cls, project, job_trigger): - """Return a fully-qualified job_trigger string.""" - return google.api_core.path_template.expand( - "projects/{project}/jobTriggers/{job_trigger}", - project=project, - job_trigger=job_trigger, - ) - - @classmethod - def location_path(cls, project, location): - """Return a fully-qualified location string.""" - return google.api_core.path_template.expand( - "projects/{project}/locations/{location}", - project=project, - location=location, - ) - - @classmethod - def organization_path(cls, organization): - """Return a fully-qualified 
organization string.""" - return google.api_core.path_template.expand( - "organizations/{organization}", organization=organization - ) - - @classmethod - def organization_deidentify_template_path(cls, organization, deidentify_template): - """Return a fully-qualified organization_deidentify_template string.""" - return google.api_core.path_template.expand( - "organizations/{organization}/deidentifyTemplates/{deidentify_template}", - organization=organization, - deidentify_template=deidentify_template, - ) - - @classmethod - def organization_inspect_template_path(cls, organization, inspect_template): - """Return a fully-qualified organization_inspect_template string.""" - return google.api_core.path_template.expand( - "organizations/{organization}/inspectTemplates/{inspect_template}", - organization=organization, - inspect_template=inspect_template, - ) - - @classmethod - def organization_location_path(cls, organization, location): - """Return a fully-qualified organization_location string.""" - return google.api_core.path_template.expand( - "organizations/{organization}/locations/{location}", - organization=organization, - location=location, - ) - - @classmethod - def organization_stored_info_type_path(cls, organization, stored_info_type): - """Return a fully-qualified organization_stored_info_type string.""" - return google.api_core.path_template.expand( - "organizations/{organization}/storedInfoTypes/{stored_info_type}", - organization=organization, - stored_info_type=stored_info_type, - ) - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project - ) - - @classmethod - def project_deidentify_template_path(cls, project, deidentify_template): - """Return a fully-qualified project_deidentify_template string.""" - return google.api_core.path_template.expand( - "projects/{project}/deidentifyTemplates/{deidentify_template}", - project=project, 
- deidentify_template=deidentify_template, - ) - - @classmethod - def project_inspect_template_path(cls, project, inspect_template): - """Return a fully-qualified project_inspect_template string.""" - return google.api_core.path_template.expand( - "projects/{project}/inspectTemplates/{inspect_template}", - project=project, - inspect_template=inspect_template, - ) - - @classmethod - def project_job_trigger_path(cls, project, job_trigger): - """Return a fully-qualified project_job_trigger string.""" - return google.api_core.path_template.expand( - "projects/{project}/jobTriggers/{job_trigger}", - project=project, - job_trigger=job_trigger, - ) - - @classmethod - def project_stored_info_type_path(cls, project, stored_info_type): - """Return a fully-qualified project_stored_info_type string.""" - return google.api_core.path_template.expand( - "projects/{project}/storedInfoTypes/{stored_info_type}", - project=project, - stored_info_type=stored_info_type, - ) - - @classmethod - def stored_info_type_path(cls, organization, stored_info_type): - """Return a fully-qualified stored_info_type string.""" - return google.api_core.path_template.expand( - "organizations/{organization}/storedInfoTypes/{stored_info_type}", - organization=organization, - stored_info_type=stored_info_type, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.DlpServiceGrpcTransport, - Callable[[~.Credentials, type], ~.DlpServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. 
This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = dlp_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. 
- if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=dlp_service_grpc_transport.DlpServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = dlp_service_grpc_transport.DlpServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def inspect_content( - self, - parent=None, - inspect_config=None, - item=None, - inspect_template_name=None, - location_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Finds potentially sensitive info in content. - This method has limits on input size, processing time, and output size. - - When no InfoTypes or CustomInfoTypes are specified in this request, the - system will automatically choose what detectors to run. By default this may - be all types, but may change over time as detectors are updated. 
- - For how to guides, see https://cloud.google.com/dlp/docs/inspecting-images - and https://cloud.google.com/dlp/docs/inspecting-text, - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> response = client.inspect_content() - - Args: - parent (str): The parent resource name, for example projects/my-project-id or - projects/my-project-id/locations/{location_id} - inspect_config (Union[dict, ~google.cloud.dlp_v2.types.InspectConfig]): Configuration for the inspector. What specified here will override - the template referenced by the inspect_template_name argument. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.InspectConfig` - item (Union[dict, ~google.cloud.dlp_v2.types.ContentItem]): The item to inspect. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.ContentItem` - inspect_template_name (str): Template to use. Any configuration directly specified in - inspect_config will override those set in the template. Singular fields - that are set in this request will replace their corresponding fields in - the template. Repeated fields are appended. Singular sub-messages and - groups are recursively merged. - location_id (str): Deprecated. This field has no effect. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.InspectContentResponse` instance. 
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "inspect_content" not in self._inner_api_calls: - self._inner_api_calls[ - "inspect_content" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.inspect_content, - default_retry=self._method_configs["InspectContent"].retry, - default_timeout=self._method_configs["InspectContent"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.InspectContentRequest( - parent=parent, - inspect_config=inspect_config, - item=item, - inspect_template_name=inspect_template_name, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["inspect_content"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def redact_image( - self, - parent=None, - location_id=None, - inspect_config=None, - image_redaction_configs=None, - include_findings=None, - byte_item=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, and output size. - See https://cloud.google.com/dlp/docs/redacting-sensitive-data-images to - learn more. - - When no InfoTypes or CustomInfoTypes are specified in this request, the - system will automatically choose what detectors to run. By default this may - be all types, but may change over time as detectors are updated. 
- - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> response = client.redact_image() - - Args: - parent (str): The parent resource name, for example projects/my-project-id or - projects/my-project-id/locations/{location_id}. - location_id (str): Deprecated. This field has no effect. - inspect_config (Union[dict, ~google.cloud.dlp_v2.types.InspectConfig]): Configuration for the inspector. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.InspectConfig` - image_redaction_configs (list[Union[dict, ~google.cloud.dlp_v2.types.ImageRedactionConfig]]): The configuration for specifying what content to redact from images. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.ImageRedactionConfig` - include_findings (bool): Whether the response should include findings along with the redacted - image. - byte_item (Union[dict, ~google.cloud.dlp_v2.types.ByteContentItem]): The content must be PNG, JPEG, SVG or BMP. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.ByteContentItem` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.RedactImageResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "redact_image" not in self._inner_api_calls: - self._inner_api_calls[ - "redact_image" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.redact_image, - default_retry=self._method_configs["RedactImage"].retry, - default_timeout=self._method_configs["RedactImage"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.RedactImageRequest( - parent=parent, - location_id=location_id, - inspect_config=inspect_config, - image_redaction_configs=image_redaction_configs, - include_findings=include_findings, - byte_item=byte_item, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["redact_image"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def deidentify_content( - self, - parent=None, - deidentify_config=None, - inspect_config=None, - item=None, - inspect_template_name=None, - deidentify_template_name=None, - location_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - De-identifies potentially sensitive info from a ContentItem. - This method has limits on input size and output size. - See https://cloud.google.com/dlp/docs/deidentify-sensitive-data to - learn more. - - When no InfoTypes or CustomInfoTypes are specified in this request, the - system will automatically choose what detectors to run. By default this may - be all types, but may change over time as detectors are updated. 
- - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> response = client.deidentify_content() - - Args: - parent (str): The parent resource name, for example projects/my-project-id or - projects/my-project-id/locations/{location_id}. - deidentify_config (Union[dict, ~google.cloud.dlp_v2.types.DeidentifyConfig]): Configuration for the de-identification of the content item. Items - specified here will override the template referenced by the - deidentify_template_name argument. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.DeidentifyConfig` - inspect_config (Union[dict, ~google.cloud.dlp_v2.types.InspectConfig]): Configuration for the inspector. Items specified here will override - the template referenced by the inspect_template_name argument. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.InspectConfig` - item (Union[dict, ~google.cloud.dlp_v2.types.ContentItem]): The item to de-identify. Will be treated as text. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.ContentItem` - inspect_template_name (str): Template to use. Any configuration directly specified in - inspect_config will override those set in the template. Singular fields - that are set in this request will replace their corresponding fields in - the template. Repeated fields are appended. Singular sub-messages and - groups are recursively merged. - deidentify_template_name (str): Template to use. Any configuration directly specified in - deidentify_config will override those set in the template. Singular - fields that are set in this request will replace their corresponding - fields in the template. Repeated fields are appended. Singular - sub-messages and groups are recursively merged. - location_id (str): Deprecated. This field has no effect. 
- retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.DeidentifyContentResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "deidentify_content" not in self._inner_api_calls: - self._inner_api_calls[ - "deidentify_content" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.deidentify_content, - default_retry=self._method_configs["DeidentifyContent"].retry, - default_timeout=self._method_configs["DeidentifyContent"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.DeidentifyContentRequest( - parent=parent, - deidentify_config=deidentify_config, - inspect_config=inspect_config, - item=item, - inspect_template_name=inspect_template_name, - deidentify_template_name=deidentify_template_name, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["deidentify_content"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def reidentify_content( - self, - parent, - 
reidentify_config=None, - inspect_config=None, - item=None, - inspect_template_name=None, - reidentify_template_name=None, - location_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> response = client.reidentify_content(parent) - - Args: - parent (str): Required. The parent resource name. - reidentify_config (Union[dict, ~google.cloud.dlp_v2.types.DeidentifyConfig]): Configuration for the re-identification of the content item. This - field shares the same proto message type that is used for - de-identification, however its usage here is for the reversal of the - previous de-identification. Re-identification is performed by examining - the transformations used to de-identify the items and executing the - reverse. This requires that only reversible transformations be provided - here. The reversible transformations are: - - - ``CryptoDeterministicConfig`` - - ``CryptoReplaceFfxFpeConfig`` - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.DeidentifyConfig` - inspect_config (Union[dict, ~google.cloud.dlp_v2.types.InspectConfig]): Configuration for the inspector. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.InspectConfig` - item (Union[dict, ~google.cloud.dlp_v2.types.ContentItem]): The item to re-identify. Will be treated as text. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.ContentItem` - inspect_template_name (str): Template to use. 
Any configuration directly specified in - ``inspect_config`` will override those set in the template. Singular - fields that are set in this request will replace their corresponding - fields in the template. Repeated fields are appended. Singular - sub-messages and groups are recursively merged. - reidentify_template_name (str): Template to use. References an instance of ``DeidentifyTemplate``. - Any configuration directly specified in ``reidentify_config`` or - ``inspect_config`` will override those set in the template. Singular - fields that are set in this request will replace their corresponding - fields in the template. Repeated fields are appended. Singular - sub-messages and groups are recursively merged. - location_id (str): Deprecated. This field has no effect. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.ReidentifyContentResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "reidentify_content" not in self._inner_api_calls: - self._inner_api_calls[ - "reidentify_content" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.reidentify_content, - default_retry=self._method_configs["ReidentifyContent"].retry, - default_timeout=self._method_configs["ReidentifyContent"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.ReidentifyContentRequest( - parent=parent, - reidentify_config=reidentify_config, - inspect_config=inspect_config, - item=item, - inspect_template_name=inspect_template_name, - reidentify_template_name=reidentify_template_name, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["reidentify_content"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_info_types( - self, - parent=None, - language_code=None, - filter_=None, - location_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Returns a list of the sensitive information types that the DLP API - supports. See https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> response = client.list_info_types() - - Args: - parent (str): The parent resource name, for example locations/{location_id} - language_code (str): BCP-47 language code for localized infoType friendly - names. If omitted, or if localized strings are not available, - en-US strings will be returned. - filter_ (str): filter to only return infoTypes supported by certain parts of the - API. Defaults to supported_by=INSPECT. 
- location_id (str): Deprecated. This field has no effect. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.ListInfoTypesResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_info_types" not in self._inner_api_calls: - self._inner_api_calls[ - "list_info_types" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_info_types, - default_retry=self._method_configs["ListInfoTypes"].retry, - default_timeout=self._method_configs["ListInfoTypes"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.ListInfoTypesRequest( - parent=parent, - language_code=language_code, - filter=filter_, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["list_info_types"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_inspect_template( - self, - parent, - inspect_template, - template_id=None, - location_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - 
timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates an InspectTemplate for re-using frequently used configuration - for inspecting content, images, and storage. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> parent = client.organization_location_path('[ORGANIZATION]', '[LOCATION]') - >>> - >>> # TODO: Initialize `inspect_template`: - >>> inspect_template = {} - >>> - >>> response = client.create_inspect_template(parent, inspect_template) - - Args: - parent (str): Required. The parent resource name, for example projects/my-project-id or - organizations/my-org-id or projects/my-project-id/locations/{location-id}. - inspect_template (Union[dict, ~google.cloud.dlp_v2.types.InspectTemplate]): Required. The InspectTemplate to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.InspectTemplate` - template_id (str): The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular expression: - ``[a-zA-Z\\d-_]+``. The maximum length is 100 characters. Can be empty - to allow the system to generate one. - location_id (str): Deprecated. This field has no effect. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.InspectTemplate` instance. 
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_inspect_template" not in self._inner_api_calls: - self._inner_api_calls[ - "create_inspect_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_inspect_template, - default_retry=self._method_configs["CreateInspectTemplate"].retry, - default_timeout=self._method_configs["CreateInspectTemplate"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.CreateInspectTemplateRequest( - parent=parent, - inspect_template=inspect_template, - template_id=template_id, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_inspect_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_inspect_template( - self, - name, - inspect_template=None, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> name = client.organization_inspect_template_path('[ORGANIZATION]', '[INSPECT_TEMPLATE]') - >>> - >>> response = client.update_inspect_template(name) - - Args: - name (str): Required. 
Resource name of organization and inspectTemplate to be - updated, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - inspect_template (Union[dict, ~google.cloud.dlp_v2.types.InspectTemplate]): New InspectTemplate value. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.InspectTemplate` - update_mask (Union[dict, ~google.cloud.dlp_v2.types.FieldMask]): Mask to control which fields get updated. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.InspectTemplate` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_inspect_template" not in self._inner_api_calls: - self._inner_api_calls[ - "update_inspect_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_inspect_template, - default_retry=self._method_configs["UpdateInspectTemplate"].retry, - default_timeout=self._method_configs["UpdateInspectTemplate"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.UpdateInspectTemplateRequest( - name=name, inspect_template=inspect_template, update_mask=update_mask - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_inspect_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_inspect_template( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> name = client.organization_inspect_template_path('[ORGANIZATION]', '[INSPECT_TEMPLATE]') - >>> - >>> response = client.get_inspect_template(name) - - Args: - name (str): Required. Resource name of the organization and inspectTemplate to - be read, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.InspectTemplate` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_inspect_template" not in self._inner_api_calls: - self._inner_api_calls[ - "get_inspect_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_inspect_template, - default_retry=self._method_configs["GetInspectTemplate"].retry, - default_timeout=self._method_configs["GetInspectTemplate"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.GetInspectTemplateRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_inspect_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_inspect_templates( - self, - parent, - page_size=None, - order_by=None, - location_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. 
- - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> parent = client.organization_location_path('[ORGANIZATION]', '[LOCATION]') - >>> - >>> # Iterate over all results - >>> for element in client.list_inspect_templates(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_inspect_templates(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The parent resource name, for example - projects/my-project-id or organizations/my-org-id or - projects/my-project-id/locations/{location_id}. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - order_by (str): Comma separated list of fields to order by, followed by ``asc`` or - ``desc`` postfix. This list is case-insensitive, default sorting order - is ascending, redundant space characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to time the template was created. - - ``update_time``: corresponds to time the template was last updated. - - ``name``: corresponds to template's name. - - ``display_name``: corresponds to template's display name. - location_id (str): Deprecated. This field has no effect. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. 
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.dlp_v2.types.InspectTemplate` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_inspect_templates" not in self._inner_api_calls: - self._inner_api_calls[ - "list_inspect_templates" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_inspect_templates, - default_retry=self._method_configs["ListInspectTemplates"].retry, - default_timeout=self._method_configs["ListInspectTemplates"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.ListInspectTemplatesRequest( - parent=parent, - page_size=page_size, - order_by=order_by, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_inspect_templates"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="inspect_templates", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_inspect_template( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - 
timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> name = client.organization_inspect_template_path('[ORGANIZATION]', '[INSPECT_TEMPLATE]') - >>> - >>> client.delete_inspect_template(name) - - Args: - name (str): Required. Resource name of the organization and inspectTemplate to - be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_inspect_template" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_inspect_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_inspect_template, - default_retry=self._method_configs["DeleteInspectTemplate"].retry, - default_timeout=self._method_configs["DeleteInspectTemplate"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.DeleteInspectTemplateRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_inspect_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_deidentify_template( - self, - parent, - deidentify_template, - template_id=None, - location_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a DeidentifyTemplate for re-using frequently used configuration - for de-identifying content, images, and storage. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> parent = client.organization_location_path('[ORGANIZATION]', '[LOCATION]') - >>> - >>> # TODO: Initialize `deidentify_template`: - >>> deidentify_template = {} - >>> - >>> response = client.create_deidentify_template(parent, deidentify_template) - - Args: - parent (str): Required. The parent resource name, for example - projects/my-project-id or organizations/my-org-id or - projects/my-project-id/locations/{location_id}. - deidentify_template (Union[dict, ~google.cloud.dlp_v2.types.DeidentifyTemplate]): Required. The DeidentifyTemplate to create. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.DeidentifyTemplate` - template_id (str): The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular expression: - ``[a-zA-Z\\d-_]+``. The maximum length is 100 characters. Can be empty - to allow the system to generate one. - location_id (str): Deprecated. This field has no effect. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.DeidentifyTemplate` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_deidentify_template" not in self._inner_api_calls: - self._inner_api_calls[ - "create_deidentify_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_deidentify_template, - default_retry=self._method_configs["CreateDeidentifyTemplate"].retry, - default_timeout=self._method_configs[ - "CreateDeidentifyTemplate" - ].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.CreateDeidentifyTemplateRequest( - parent=parent, - deidentify_template=deidentify_template, - template_id=template_id, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_deidentify_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_deidentify_template( - self, - name, - deidentify_template=None, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates the DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> name = client.organization_deidentify_template_path('[ORGANIZATION]', '[DEIDENTIFY_TEMPLATE]') - >>> - >>> response = client.update_deidentify_template(name) - - Args: - name (str): Required. Resource name of organization and deidentify template to - be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - deidentify_template (Union[dict, ~google.cloud.dlp_v2.types.DeidentifyTemplate]): New DeidentifyTemplate value. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.DeidentifyTemplate` - update_mask (Union[dict, ~google.cloud.dlp_v2.types.FieldMask]): Mask to control which fields get updated. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.DeidentifyTemplate` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_deidentify_template" not in self._inner_api_calls: - self._inner_api_calls[ - "update_deidentify_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_deidentify_template, - default_retry=self._method_configs["UpdateDeidentifyTemplate"].retry, - default_timeout=self._method_configs[ - "UpdateDeidentifyTemplate" - ].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.UpdateDeidentifyTemplateRequest( - name=name, deidentify_template=deidentify_template, update_mask=update_mask - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_deidentify_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_deidentify_template( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> name = client.organization_deidentify_template_path('[ORGANIZATION]', '[DEIDENTIFY_TEMPLATE]') - >>> - >>> response = client.get_deidentify_template(name) - - Args: - name (str): Required. Resource name of the organization and deidentify template - to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. 
- timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.DeidentifyTemplate` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_deidentify_template" not in self._inner_api_calls: - self._inner_api_calls[ - "get_deidentify_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_deidentify_template, - default_retry=self._method_configs["GetDeidentifyTemplate"].retry, - default_timeout=self._method_configs["GetDeidentifyTemplate"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.GetDeidentifyTemplateRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_deidentify_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_deidentify_templates( - self, - parent, - page_size=None, - order_by=None, - location_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists DeidentifyTemplates. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. 
- - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> parent = client.organization_location_path('[ORGANIZATION]', '[LOCATION]') - >>> - >>> # Iterate over all results - >>> for element in client.list_deidentify_templates(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_deidentify_templates(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The parent resource name, for example - projects/my-project-id or organizations/my-org-id or - projects/my-project-id/locations/{location_id}. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - order_by (str): Comma separated list of fields to order by, followed by ``asc`` or - ``desc`` postfix. This list is case-insensitive, default sorting order - is ascending, redundant space characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to time the template was created. - - ``update_time``: corresponds to time the template was last updated. - - ``name``: corresponds to template's name. - - ``display_name``: corresponds to template's display name. - location_id (str): Deprecated. This field has no effect. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. 
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.dlp_v2.types.DeidentifyTemplate` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_deidentify_templates" not in self._inner_api_calls: - self._inner_api_calls[ - "list_deidentify_templates" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_deidentify_templates, - default_retry=self._method_configs["ListDeidentifyTemplates"].retry, - default_timeout=self._method_configs["ListDeidentifyTemplates"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.ListDeidentifyTemplatesRequest( - parent=parent, - page_size=page_size, - order_by=order_by, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_deidentify_templates"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="deidentify_templates", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_deidentify_template( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - 
timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> name = client.organization_deidentify_template_path('[ORGANIZATION]', '[DEIDENTIFY_TEMPLATE]') - >>> - >>> client.delete_deidentify_template(name) - - Args: - name (str): Required. Resource name of the organization and deidentify template - to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_deidentify_template" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_deidentify_template" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_deidentify_template, - default_retry=self._method_configs["DeleteDeidentifyTemplate"].retry, - default_timeout=self._method_configs[ - "DeleteDeidentifyTemplate" - ].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.DeleteDeidentifyTemplateRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_deidentify_template"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_dlp_job( - self, - parent, - inspect_job=None, - risk_job=None, - job_id=None, - location_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new job to inspect storage or calculate risk metrics. - See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - - When no InfoTypes or CustomInfoTypes are specified in inspect jobs, the - system will automatically choose what detectors to run. By default this may - be all types, but may change over time as detectors are updated. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> response = client.create_dlp_job(parent) - - Args: - parent (str): Required. The parent resource name, for example - projects/my-project-id or - projects/my-project-id/locations/{location_id}. 
- inspect_job (Union[dict, ~google.cloud.dlp_v2.types.InspectJobConfig]): Set to control what and how to inspect. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.InspectJobConfig` - risk_job (Union[dict, ~google.cloud.dlp_v2.types.RiskAnalysisJobConfig]): Set to choose what metric to calculate. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.RiskAnalysisJobConfig` - job_id (str): The job id can contain uppercase and lowercase letters, numbers, and - hyphens; that is, it must match the regular expression: - ``[a-zA-Z\\d-_]+``. The maximum length is 100 characters. Can be empty - to allow the system to generate one. - location_id (str): Deprecated. This field has no effect. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.DlpJob` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_dlp_job" not in self._inner_api_calls: - self._inner_api_calls[ - "create_dlp_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_dlp_job, - default_retry=self._method_configs["CreateDlpJob"].retry, - default_timeout=self._method_configs["CreateDlpJob"].timeout, - client_info=self._client_info, - ) - - # Sanity check: We have some fields which are mutually exclusive; - # raise ValueError if more than one is sent. - google.api_core.protobuf_helpers.check_oneof( - inspect_job=inspect_job, risk_job=risk_job - ) - - request = dlp_pb2.CreateDlpJobRequest( - parent=parent, - inspect_job=inspect_job, - risk_job=risk_job, - job_id=job_id, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_dlp_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_dlp_jobs( - self, - parent, - filter_=None, - page_size=None, - type_=None, - order_by=None, - location_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists DlpJobs that match the specified filter in the request. - See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_dlp_jobs(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_dlp_jobs(parent).pages: - ... 
for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The parent resource name, for example - projects/my-project-id or - projects/my-project-id/locations/{location_id}. - filter_ (str): Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical operators. - A sequence of restrictions implicitly uses ``AND``. - - A restriction has the form of ``{field} {operator} {value}``. - - Supported fields/values for inspect jobs: - - - ``state`` - PENDING|RUNNING|CANCELED|FINISHED|FAILED - - ``inspected_storage`` - DATASTORE|CLOUD_STORAGE|BIGQUERY - - ``trigger_name`` - The resource name of the trigger that created - job. - - 'end_time\` - Corresponds to time the job finished. - - 'start_time\` - Corresponds to time the job finished. - - - Supported fields for risk analysis jobs: - - - ``state`` - RUNNING|CANCELED|FINISHED|FAILED - - 'end_time\` - Corresponds to time the job finished. - - 'start_time\` - Corresponds to time the job finished. - - - The operator must be ``=`` or ``!=``. - - Examples: - - - inspected_storage = cloud_storage AND state = done - - inspected_storage = cloud_storage OR inspected_storage = bigquery - - inspected_storage = cloud_storage AND (state = done OR state = - canceled) - - end_time > "2017-12-12T00:00:00+00:00" - - The length of this field should be no more than 500 characters. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - type_ (~google.cloud.dlp_v2.types.DlpJobType): The type of job. Defaults to ``DlpJobType.INSPECT`` - order_by (str): Comma separated list of fields to order by, followed by ``asc`` or - ``desc`` postfix. 
This list is case-insensitive, default sorting order - is ascending, redundant space characters are insignificant. - - Example: ``name asc, end_time asc, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to time the job was created. - - ``end_time``: corresponds to time the job ended. - - ``name``: corresponds to job's name. - - ``state``: corresponds to ``state`` - location_id (str): Deprecated. This field has no effect. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.dlp_v2.types.DlpJob` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_dlp_jobs" not in self._inner_api_calls: - self._inner_api_calls[ - "list_dlp_jobs" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_dlp_jobs, - default_retry=self._method_configs["ListDlpJobs"].retry, - default_timeout=self._method_configs["ListDlpJobs"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.ListDlpJobsRequest( - parent=parent, - filter=filter_, - page_size=page_size, - type=type_, - order_by=order_by, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_dlp_jobs"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="jobs", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_dlp_job( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> # TODO: Initialize `name`: - >>> name = '' - >>> - >>> response = client.get_dlp_job(name) - - Args: - name (str): Required. The name of the DlpJob resource. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. 
- timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.DlpJob` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_dlp_job" not in self._inner_api_calls: - self._inner_api_calls[ - "get_dlp_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_dlp_job, - default_retry=self._method_configs["GetDlpJob"].retry, - default_timeout=self._method_configs["GetDlpJob"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.GetDlpJobRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_dlp_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_dlp_job( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a long-running DlpJob. This method indicates that the client is - no longer interested in the DlpJob result. The job will be cancelled if - possible. - See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. 
- - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> # TODO: Initialize `name`: - >>> name = '' - >>> - >>> client.delete_dlp_job(name) - - Args: - name (str): Required. The name of the DlpJob resource to be deleted. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_dlp_job" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_dlp_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_dlp_job, - default_retry=self._method_configs["DeleteDlpJob"].retry, - default_timeout=self._method_configs["DeleteDlpJob"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.DeleteDlpJobRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_dlp_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def cancel_dlp_job( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Starts asynchronous cancellation on a long-running DlpJob. The server - makes a best effort to cancel the DlpJob, but success is not - guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> # TODO: Initialize `name`: - >>> name = '' - >>> - >>> client.cancel_dlp_job(name) - - Args: - name (str): Required. The name of the DlpJob resource to be cancelled. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. 
- - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "cancel_dlp_job" not in self._inner_api_calls: - self._inner_api_calls[ - "cancel_dlp_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.cancel_dlp_job, - default_retry=self._method_configs["CancelDlpJob"].retry, - default_timeout=self._method_configs["CancelDlpJob"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.CancelDlpJobRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["cancel_dlp_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def finish_dlp_job( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Finish a running hybrid DlpJob. Triggers the finalization steps and running - of any enabled actions that have not yet run. - Early access feature is in a pre-release state and might change or have - limited support. For more information, see - https://cloud.google.com/products#product-launch-stages. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> # TODO: Initialize `name`: - >>> name = '' - >>> - >>> client.finish_dlp_job(name) - - Args: - name (str): Required. The name of the DlpJob resource to be cancelled. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. 
If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "finish_dlp_job" not in self._inner_api_calls: - self._inner_api_calls[ - "finish_dlp_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.finish_dlp_job, - default_retry=self._method_configs["FinishDlpJob"].retry, - default_timeout=self._method_configs["FinishDlpJob"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.FinishDlpJobRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["finish_dlp_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def hybrid_inspect_dlp_job( - self, - name, - hybrid_item=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Inspect hybrid content and store findings to a job. - To review the findings inspect the job. Inspection will occur - asynchronously. - Early access feature is in a pre-release state and might change or have - limited support. 
For more information, see - https://cloud.google.com/products#product-launch-stages. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> # TODO: Initialize `name`: - >>> name = '' - >>> - >>> response = client.hybrid_inspect_dlp_job(name) - - Args: - name (str): Required. Resource name of the job to execute a hybrid inspect on, - for example ``projects/dlp-test-project/dlpJob/53234423``. - hybrid_item (Union[dict, ~google.cloud.dlp_v2.types.HybridContentItem]): The item to inspect. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.HybridContentItem` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.HybridInspectResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "hybrid_inspect_dlp_job" not in self._inner_api_calls: - self._inner_api_calls[ - "hybrid_inspect_dlp_job" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.hybrid_inspect_dlp_job, - default_retry=self._method_configs["HybridInspectDlpJob"].retry, - default_timeout=self._method_configs["HybridInspectDlpJob"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.HybridInspectDlpJobRequest(name=name, hybrid_item=hybrid_item) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["hybrid_inspect_dlp_job"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_job_triggers( - self, - parent, - page_size=None, - order_by=None, - filter_=None, - location_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists job triggers. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_job_triggers(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_job_triggers(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The parent resource name, for example - ``projects/my-project-id`` or - projects/my-project-id/locations/{location_id}. - page_size (int): The maximum number of resources contained in the - underlying API response. 
If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - order_by (str): Comma separated list of triggeredJob fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, default - sorting order is ascending, redundant space characters are - insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to time the JobTrigger was created. - - ``update_time``: corresponds to time the JobTrigger was last updated. - - ``last_run_time``: corresponds to the last time the JobTrigger ran. - - ``name``: corresponds to JobTrigger's name. - - ``display_name``: corresponds to JobTrigger's display name. - - ``status``: corresponds to JobTrigger's status. - filter_ (str): Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical operators. - A sequence of restrictions implicitly uses ``AND``. - - A restriction has the form of ``{field} {operator} {value}``. - - Supported fields/values for inspect jobs: - - - ``status`` - HEALTHY|PAUSED|CANCELLED - - ``inspected_storage`` - DATASTORE|CLOUD_STORAGE|BIGQUERY - - 'last_run_time\` - RFC 3339 formatted timestamp, surrounded by - quotation marks. Nanoseconds are ignored. - - 'error_count' - Number of errors that have occurred while running. - - - The operator must be ``=`` or ``!=`` for status and - inspected_storage. - - Examples: - - - inspected_storage = cloud_storage AND status = HEALTHY - - inspected_storage = cloud_storage OR inspected_storage = bigquery - - inspected_storage = cloud_storage AND (state = PAUSED OR state = - HEALTHY) - - last_run_time > "2017-12-12T00:00:00+00:00" - - The length of this field should be no more than 500 characters. 
- location_id (str): Deprecated. This field has no effect. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.dlp_v2.types.JobTrigger` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "list_job_triggers" not in self._inner_api_calls: - self._inner_api_calls[ - "list_job_triggers" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_job_triggers, - default_retry=self._method_configs["ListJobTriggers"].retry, - default_timeout=self._method_configs["ListJobTriggers"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.ListJobTriggersRequest( - parent=parent, - page_size=page_size, - order_by=order_by, - filter=filter_, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_job_triggers"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="job_triggers", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_job_trigger( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a job trigger. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> name = client.project_job_trigger_path('[PROJECT]', '[JOB_TRIGGER]') - >>> - >>> response = client.get_job_trigger(name) - - Args: - name (str): Required. Resource name of the project and the triggeredJob, for - example ``projects/dlp-test-project/jobTriggers/53234423``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. 
- timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.JobTrigger` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_job_trigger" not in self._inner_api_calls: - self._inner_api_calls[ - "get_job_trigger" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_job_trigger, - default_retry=self._method_configs["GetJobTrigger"].retry, - default_timeout=self._method_configs["GetJobTrigger"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.GetJobTriggerRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_job_trigger"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_job_trigger( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a job trigger. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. 
- - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> name = client.project_job_trigger_path('[PROJECT]', '[JOB_TRIGGER]') - >>> - >>> client.delete_job_trigger(name) - - Args: - name (str): Required. Resource name of the project and the triggeredJob, for - example ``projects/dlp-test-project/jobTriggers/53234423``. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_job_trigger" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_job_trigger" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_job_trigger, - default_retry=self._method_configs["DeleteJobTrigger"].retry, - default_timeout=self._method_configs["DeleteJobTrigger"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.DeleteJobTriggerRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_job_trigger"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def hybrid_inspect_job_trigger( - self, - name, - hybrid_item=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Inspect hybrid content and store findings to a trigger. The inspection - will be processed asynchronously. To review the findings monitor the - jobs within the trigger. - Early access feature is in a pre-release state and might change or have - limited support. For more information, see - https://cloud.google.com/products#product-launch-stages. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> name = client.project_job_trigger_path('[PROJECT]', '[JOB_TRIGGER]') - >>> - >>> response = client.hybrid_inspect_job_trigger(name) - - Args: - name (str): Required. Resource name of the trigger to execute a hybrid inspect - on, for example ``projects/dlp-test-project/jobTriggers/53234423``. - hybrid_item (Union[dict, ~google.cloud.dlp_v2.types.HybridContentItem]): The item to inspect. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.HybridContentItem` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.HybridInspectResponse` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "hybrid_inspect_job_trigger" not in self._inner_api_calls: - self._inner_api_calls[ - "hybrid_inspect_job_trigger" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.hybrid_inspect_job_trigger, - default_retry=self._method_configs["HybridInspectJobTrigger"].retry, - default_timeout=self._method_configs["HybridInspectJobTrigger"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.HybridInspectJobTriggerRequest( - name=name, hybrid_item=hybrid_item - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["hybrid_inspect_job_trigger"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_job_trigger( - self, - name, - job_trigger=None, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates a job trigger. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> name = client.project_job_trigger_path('[PROJECT]', '[JOB_TRIGGER]') - >>> - >>> response = client.update_job_trigger(name) - - Args: - name (str): Required. Resource name of the project and the triggeredJob, for - example ``projects/dlp-test-project/jobTriggers/53234423``. - job_trigger (Union[dict, ~google.cloud.dlp_v2.types.JobTrigger]): New JobTrigger value. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.JobTrigger` - update_mask (Union[dict, ~google.cloud.dlp_v2.types.FieldMask]): Mask to control which fields get updated. 
- - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.JobTrigger` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "update_job_trigger" not in self._inner_api_calls: - self._inner_api_calls[ - "update_job_trigger" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_job_trigger, - default_retry=self._method_configs["UpdateJobTrigger"].retry, - default_timeout=self._method_configs["UpdateJobTrigger"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.UpdateJobTriggerRequest( - name=name, job_trigger=job_trigger, update_mask=update_mask - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_job_trigger"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_job_trigger( - self, - parent, - job_trigger, - trigger_id=None, - location_id=None, - 
retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a job trigger to run DLP actions such as scanning storage for - sensitive information on a set schedule. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # TODO: Initialize `job_trigger`: - >>> job_trigger = {} - >>> - >>> response = client.create_job_trigger(parent, job_trigger) - - Args: - parent (str): Required. The parent resource name, for example - projects/my-project-id or - projects/my-project-id/locations/{location_id}. - job_trigger (Union[dict, ~google.cloud.dlp_v2.types.JobTrigger]): Required. The JobTrigger to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.JobTrigger` - trigger_id (str): The trigger id can contain uppercase and lowercase letters, numbers, - and hyphens; that is, it must match the regular expression: - ``[a-zA-Z\\d-_]+``. The maximum length is 100 characters. Can be empty - to allow the system to generate one. - location_id (str): Deprecated. This field has no effect. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.JobTrigger` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. 
- google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_job_trigger" not in self._inner_api_calls: - self._inner_api_calls[ - "create_job_trigger" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_job_trigger, - default_retry=self._method_configs["CreateJobTrigger"].retry, - default_timeout=self._method_configs["CreateJobTrigger"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.CreateJobTriggerRequest( - parent=parent, - job_trigger=job_trigger, - trigger_id=trigger_id, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_job_trigger"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def create_stored_info_type( - self, - parent, - config, - stored_info_type_id=None, - location_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a pre-built stored infoType to be used for inspection. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> parent = client.organization_location_path('[ORGANIZATION]', '[LOCATION]') - >>> - >>> # TODO: Initialize `config`: - >>> config = {} - >>> - >>> response = client.create_stored_info_type(parent, config) - - Args: - parent (str): Required. 
The parent resource name, for example - projects/my-project-id or organizations/my-org-id or - projects/my-project-id/locations/{location_id} - config (Union[dict, ~google.cloud.dlp_v2.types.StoredInfoTypeConfig]): Required. Configuration of the storedInfoType to create. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.StoredInfoTypeConfig` - stored_info_type_id (str): The storedInfoType ID can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular expression: - ``[a-zA-Z\\d-_]+``. The maximum length is 100 characters. Can be empty - to allow the system to generate one. - location_id (str): Deprecated. This field has no effect. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.StoredInfoType` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "create_stored_info_type" not in self._inner_api_calls: - self._inner_api_calls[ - "create_stored_info_type" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_stored_info_type, - default_retry=self._method_configs["CreateStoredInfoType"].retry, - default_timeout=self._method_configs["CreateStoredInfoType"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.CreateStoredInfoTypeRequest( - parent=parent, - config=config, - stored_info_type_id=stored_info_type_id, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_stored_info_type"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_stored_info_type( - self, - name, - config=None, - update_mask=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Updates the stored infoType by creating a new version. The existing version - will continue to be used until the new version is ready. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> name = client.organization_stored_info_type_path('[ORGANIZATION]', '[STORED_INFO_TYPE]') - >>> - >>> response = client.update_stored_info_type(name) - - Args: - name (str): Required. Resource name of organization and storedInfoType to be - updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - config (Union[dict, ~google.cloud.dlp_v2.types.StoredInfoTypeConfig]): Updated configuration for the storedInfoType. 
If not provided, a new - version of the storedInfoType will be created with the existing - configuration. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.StoredInfoTypeConfig` - update_mask (Union[dict, ~google.cloud.dlp_v2.types.FieldMask]): Mask to control which fields get updated. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.dlp_v2.types.FieldMask` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.StoredInfoType` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_stored_info_type" not in self._inner_api_calls: - self._inner_api_calls[ - "update_stored_info_type" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_stored_info_type, - default_retry=self._method_configs["UpdateStoredInfoType"].retry, - default_timeout=self._method_configs["UpdateStoredInfoType"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.UpdateStoredInfoTypeRequest( - name=name, config=config, update_mask=update_mask - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_stored_info_type"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def get_stored_info_type( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Gets a stored infoType. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> name = client.organization_stored_info_type_path('[ORGANIZATION]', '[STORED_INFO_TYPE]') - >>> - >>> response = client.get_stored_info_type(name) - - Args: - name (str): Required. Resource name of the organization and storedInfoType to be - read, for example ``organizations/433245324/storedInfoTypes/432452342`` - or projects/project-id/storedInfoTypes/432452342. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. 
Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.dlp_v2.types.StoredInfoType` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_stored_info_type" not in self._inner_api_calls: - self._inner_api_calls[ - "get_stored_info_type" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_stored_info_type, - default_retry=self._method_configs["GetStoredInfoType"].retry, - default_timeout=self._method_configs["GetStoredInfoType"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.GetStoredInfoTypeRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_stored_info_type"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_stored_info_types( - self, - parent, - page_size=None, - order_by=None, - location_id=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists stored infoTypes. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. 
- - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> parent = client.organization_location_path('[ORGANIZATION]', '[LOCATION]') - >>> - >>> # Iterate over all results - >>> for element in client.list_stored_info_types(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_stored_info_types(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The parent resource name, for example - projects/my-project-id or organizations/my-org-id or - projects/my-project-id/locations/{location_id}. - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - order_by (str): Comma separated list of fields to order by, followed by ``asc`` or - ``desc`` postfix. This list is case-insensitive, default sorting order - is ascending, redundant space characters are insignificant. - - Example: ``name asc, display_name, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to time the most recent version of the - resource was created. - - ``state``: corresponds to the state of the resource. - - ``name``: corresponds to resource name. - - ``display_name``: corresponds to info type's display name. - location_id (str): Deprecated. This field has no effect. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. 
- metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.dlp_v2.types.StoredInfoType` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_stored_info_types" not in self._inner_api_calls: - self._inner_api_calls[ - "list_stored_info_types" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_stored_info_types, - default_retry=self._method_configs["ListStoredInfoTypes"].retry, - default_timeout=self._method_configs["ListStoredInfoTypes"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.ListStoredInfoTypesRequest( - parent=parent, - page_size=page_size, - order_by=order_by, - location_id=location_id, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_stored_info_types"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="stored_info_types", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def delete_stored_info_type( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - 
timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes a stored infoType. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. - - Example: - >>> from google.cloud import dlp_v2 - >>> - >>> client = dlp_v2.DlpServiceClient() - >>> - >>> name = client.organization_stored_info_type_path('[ORGANIZATION]', '[STORED_INFO_TYPE]') - >>> - >>> client.delete_stored_info_type(name) - - Args: - name (str): Required. Resource name of the organization and storedInfoType to be - deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_stored_info_type" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_stored_info_type" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_stored_info_type, - default_retry=self._method_configs["DeleteStoredInfoType"].retry, - default_timeout=self._method_configs["DeleteStoredInfoType"].timeout, - client_info=self._client_info, - ) - - request = dlp_pb2.DeleteStoredInfoTypeRequest(name=name) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_stored_info_type"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/google/cloud/dlp_v2/gapic/dlp_service_client_config.py b/google/cloud/dlp_v2/gapic/dlp_service_client_config.py deleted file mode 100644 index 7af084d2..00000000 --- a/google/cloud/dlp_v2/gapic/dlp_service_client_config.py +++ /dev/null @@ -1,188 +0,0 @@ -config = { - "interfaces": { - "google.privacy.dlp.v2.DlpService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "InspectContent": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "RedactImage": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeidentifyContent": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ReidentifyContent": { - 
"timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListInfoTypes": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateInspectTemplate": { - "timeout_millis": 300000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateInspectTemplate": { - "timeout_millis": 300000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetInspectTemplate": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListInspectTemplates": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteInspectTemplate": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateDeidentifyTemplate": { - "timeout_millis": 300000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateDeidentifyTemplate": { - "timeout_millis": 300000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetDeidentifyTemplate": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListDeidentifyTemplates": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteDeidentifyTemplate": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CreateDlpJob": { - "timeout_millis": 300000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "ListDlpJobs": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetDlpJob": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteDlpJob": { - "timeout_millis": 300000, - 
"retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "CancelDlpJob": { - "timeout_millis": 300000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "FinishDlpJob": { - "timeout_millis": 300000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "HybridInspectDlpJob": { - "timeout_millis": 300000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "ListJobTriggers": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetJobTrigger": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteJobTrigger": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "HybridInspectJobTrigger": { - "timeout_millis": 300000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateJobTrigger": { - "timeout_millis": 300000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "CreateJobTrigger": { - "timeout_millis": 300000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "CreateStoredInfoType": { - "timeout_millis": 300000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateStoredInfoType": { - "timeout_millis": 300000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "GetStoredInfoType": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "ListStoredInfoTypes": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteStoredInfoType": { - "timeout_millis": 300000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/google/cloud/dlp_v2/gapic/enums.py b/google/cloud/dlp_v2/gapic/enums.py 
deleted file mode 100644 index 5c055bb6..00000000 --- a/google/cloud/dlp_v2/gapic/enums.py +++ /dev/null @@ -1,487 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class ContentOption(enum.IntEnum): - """ - Options describing which parts of the provided content should be scanned. - - Attributes: - CONTENT_UNSPECIFIED (int): Includes entire content of a file or a data stream. - CONTENT_TEXT (int): Text content within the data, excluding any metadata. - CONTENT_IMAGE (int): Images found in the data. - """ - - CONTENT_UNSPECIFIED = 0 - CONTENT_TEXT = 1 - CONTENT_IMAGE = 2 - - -class DayOfWeek(enum.IntEnum): - """ - Represents a day of week. - - Attributes: - DAY_OF_WEEK_UNSPECIFIED (int): The unspecified day-of-week. - MONDAY (int): The day-of-week of Monday. - TUESDAY (int): The day-of-week of Tuesday. - WEDNESDAY (int): The day-of-week of Wednesday. - THURSDAY (int): The day-of-week of Thursday. - FRIDAY (int): The day-of-week of Friday. - SATURDAY (int): The day-of-week of Saturday. - SUNDAY (int): The day-of-week of Sunday. - """ - - DAY_OF_WEEK_UNSPECIFIED = 0 - MONDAY = 1 - TUESDAY = 2 - WEDNESDAY = 3 - THURSDAY = 4 - FRIDAY = 5 - SATURDAY = 6 - SUNDAY = 7 - - -class DlpJobType(enum.IntEnum): - """ - An enum to represent the various types of DLP jobs. 
- - Attributes: - DLP_JOB_TYPE_UNSPECIFIED (int): Unused - INSPECT_JOB (int): The job inspected Google Cloud for sensitive data. - RISK_ANALYSIS_JOB (int): The job executed a Risk Analysis computation. - """ - - DLP_JOB_TYPE_UNSPECIFIED = 0 - INSPECT_JOB = 1 - RISK_ANALYSIS_JOB = 2 - - -class FileType(enum.IntEnum): - """ - Definitions of file type groups to scan. - - Attributes: - FILE_TYPE_UNSPECIFIED (int): Includes all files. - BINARY_FILE (int): Includes all file extensions not covered by text file types. - TEXT_FILE (int): Included file extensions: - asc, brf, c, cc, cpp, csv, cxx, c++, cs, css, dart, eml, go, h, hh, hpp, - hxx, h++, hs, html, htm, shtml, shtm, xhtml, lhs, ini, java, js, json, - ocaml, md, mkd, markdown, m, ml, mli, pl, pm, php, phtml, pht, py, pyw, - rb, rbw, rs, rc, scala, sh, sql, tex, txt, text, tsv, vcard, vcs, wml, - xml, xsl, xsd, yml, yaml. - IMAGE (int): Included file extensions: bmp, gif, jpg, jpeg, jpe, png. - bytes_limit_per_file has no effect on image files. Image inspection is - restricted to 'global', 'us', 'asia', and 'europe'. - WORD (int): Included file extensions: - docx, dotx, docm, dotm - PDF (int): Included file extensions: - pdf - AVRO (int): Included file extensions: - avro - """ - - FILE_TYPE_UNSPECIFIED = 0 - BINARY_FILE = 1 - TEXT_FILE = 2 - IMAGE = 3 - WORD = 5 - PDF = 6 - AVRO = 7 - - -class InfoTypeSupportedBy(enum.IntEnum): - """ - Parts of the APIs which use certain infoTypes. - - Attributes: - ENUM_TYPE_UNSPECIFIED (int): Unused. - INSPECT (int): Supported by the inspect operations. - RISK_ANALYSIS (int): Supported by the risk analysis operations. - """ - - ENUM_TYPE_UNSPECIFIED = 0 - INSPECT = 1 - RISK_ANALYSIS = 2 - - -class Likelihood(enum.IntEnum): - """ - Categorization of results based on how likely they are to represent a match, - based on the number of elements they contain which imply a match. - - Attributes: - LIKELIHOOD_UNSPECIFIED (int): Default value; same as POSSIBLE. 
- VERY_UNLIKELY (int): Few matching elements. - UNLIKELY (int) - POSSIBLE (int): Some matching elements. - LIKELY (int) - VERY_LIKELY (int): Many matching elements. - """ - - LIKELIHOOD_UNSPECIFIED = 0 - VERY_UNLIKELY = 1 - UNLIKELY = 2 - POSSIBLE = 3 - LIKELY = 4 - VERY_LIKELY = 5 - - -class MatchingType(enum.IntEnum): - """ - Type of the match which can be applied to different ways of matching, like - Dictionary, regular expression and intersecting with findings of another - info type. - - Attributes: - MATCHING_TYPE_UNSPECIFIED (int): Invalid. - MATCHING_TYPE_FULL_MATCH (int): Full match. - - - Dictionary: join of Dictionary results matched complete finding quote - - Regex: all regex matches fill a finding quote start to end - - Exclude info type: completely inside affecting info types findings - MATCHING_TYPE_PARTIAL_MATCH (int): Partial match. - - - Dictionary: at least one of the tokens in the finding matches - - Regex: substring of the finding matches - - Exclude info type: intersects with affecting info types findings - MATCHING_TYPE_INVERSE_MATCH (int): Inverse match. - - - Dictionary: no tokens in the finding match the dictionary - - Regex: finding doesn't match the regex - - Exclude info type: no intersection with affecting info types findings - """ - - MATCHING_TYPE_UNSPECIFIED = 0 - MATCHING_TYPE_FULL_MATCH = 1 - MATCHING_TYPE_PARTIAL_MATCH = 2 - MATCHING_TYPE_INVERSE_MATCH = 3 - - -class MetadataType(enum.IntEnum): - """ - Type of metadata containing the finding. - - Attributes: - METADATATYPE_UNSPECIFIED (int): Unused - STORAGE_METADATA (int): General file metadata provided by GCS. - """ - - METADATATYPE_UNSPECIFIED = 0 - STORAGE_METADATA = 2 - - -class RelationalOperator(enum.IntEnum): - """ - Operators available for comparing the value of fields. - - Attributes: - RELATIONAL_OPERATOR_UNSPECIFIED (int): Unused - EQUAL_TO (int): Equal. Attempts to match even with incompatible types. - NOT_EQUAL_TO (int): Not equal to. 
Attempts to match even with incompatible types. - GREATER_THAN (int): Greater than. - LESS_THAN (int): Less than. - GREATER_THAN_OR_EQUALS (int): Greater than or equals. - LESS_THAN_OR_EQUALS (int): Less than or equals. - EXISTS (int): Exists - """ - - RELATIONAL_OPERATOR_UNSPECIFIED = 0 - EQUAL_TO = 1 - NOT_EQUAL_TO = 2 - GREATER_THAN = 3 - LESS_THAN = 4 - GREATER_THAN_OR_EQUALS = 5 - LESS_THAN_OR_EQUALS = 6 - EXISTS = 7 - - -class StoredInfoTypeState(enum.IntEnum): - """ - State of a StoredInfoType version. - - Attributes: - STORED_INFO_TYPE_STATE_UNSPECIFIED (int): Unused - PENDING (int): StoredInfoType version is being created. - READY (int): StoredInfoType version is ready for use. - FAILED (int): StoredInfoType creation failed. All relevant error messages are - returned in the ``StoredInfoTypeVersion`` message. - INVALID (int): StoredInfoType is no longer valid because artifacts stored in - user-controlled storage were modified. To fix an invalid StoredInfoType, - use the ``UpdateStoredInfoType`` method to create a new version. - """ - - STORED_INFO_TYPE_STATE_UNSPECIFIED = 0 - PENDING = 1 - READY = 2 - FAILED = 3 - INVALID = 4 - - -class BigQueryOptions(object): - class SampleMethod(enum.IntEnum): - """ - How to sample rows if not all rows are scanned. Meaningful only when - used in conjunction with either rows_limit or rows_limit_percent. If not - specified, scanning would start from the top. - - Attributes: - SAMPLE_METHOD_UNSPECIFIED (int) - TOP (int): Scan from the top (default). - RANDOM_START (int): Randomly pick the row to start scanning. The scanned rows are contiguous. - """ - - SAMPLE_METHOD_UNSPECIFIED = 0 - TOP = 1 - RANDOM_START = 2 - - -class ByteContentItem(object): - class BytesType(enum.IntEnum): - """ - The type of data being sent for inspection. - - Attributes: - BYTES_TYPE_UNSPECIFIED (int): Unused - IMAGE (int): Any image type. 
- IMAGE_JPEG (int): jpeg - IMAGE_BMP (int): bmp - IMAGE_PNG (int): png - IMAGE_SVG (int): svg - TEXT_UTF8 (int): plain text - WORD_DOCUMENT (int): docx, docm, dotx, dotm - PDF (int): pdf - AVRO (int): avro - """ - - BYTES_TYPE_UNSPECIFIED = 0 - IMAGE = 6 - IMAGE_JPEG = 1 - IMAGE_BMP = 2 - IMAGE_PNG = 3 - IMAGE_SVG = 4 - TEXT_UTF8 = 5 - WORD_DOCUMENT = 7 - PDF = 8 - AVRO = 11 - - -class CharsToIgnore(object): - class CommonCharsToIgnore(enum.IntEnum): - """ - Convenience enum for indication common characters to not transform. - - Attributes: - COMMON_CHARS_TO_IGNORE_UNSPECIFIED (int): Unused. - NUMERIC (int): 0-9 - ALPHA_UPPER_CASE (int): A-Z - ALPHA_LOWER_CASE (int): a-z - PUNCTUATION (int): US Punctuation, one of !"#$%&'()*+,-./:;<=>?@[]^_`{|}~ - WHITESPACE (int): Whitespace character - """ - - COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0 - NUMERIC = 1 - ALPHA_UPPER_CASE = 2 - ALPHA_LOWER_CASE = 3 - PUNCTUATION = 4 - WHITESPACE = 5 - - -class CloudStorageOptions(object): - class SampleMethod(enum.IntEnum): - """ - How to sample bytes if not all bytes are scanned. Meaningful only - when used in conjunction with bytes_limit_per_file. If not specified, - scanning would start from the top. - - Attributes: - SAMPLE_METHOD_UNSPECIFIED (int) - TOP (int): Scan from the top (default). - RANDOM_START (int): For each file larger than bytes_limit_per_file, randomly pick the - offset to start scanning. The scanned bytes are contiguous. - """ - - SAMPLE_METHOD_UNSPECIFIED = 0 - TOP = 1 - RANDOM_START = 2 - - -class CryptoReplaceFfxFpeConfig(object): - class FfxCommonNativeAlphabet(enum.IntEnum): - """ - These are commonly used subsets of the alphabet that the FFX mode - natively supports. In the algorithm, the alphabet is selected using - the "radix". Therefore each corresponds to particular radix. - - Attributes: - FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED (int): Unused. 
- NUMERIC (int): [0-9] (radix of 10) - HEXADECIMAL (int): [0-9A-F] (radix of 16) - UPPER_CASE_ALPHA_NUMERIC (int): [0-9A-Z] (radix of 36) - ALPHA_NUMERIC (int): [0-9A-Za-z] (radix of 62) - """ - - FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0 - NUMERIC = 1 - HEXADECIMAL = 2 - UPPER_CASE_ALPHA_NUMERIC = 3 - ALPHA_NUMERIC = 4 - - -class CustomInfoType(object): - class ExclusionType(enum.IntEnum): - """ - Attributes: - EXCLUSION_TYPE_UNSPECIFIED (int): A finding of this custom info type will not be excluded from results. - EXCLUSION_TYPE_EXCLUDE (int): A finding of this custom info type will be excluded from final results, - but can still affect rule execution. - """ - - EXCLUSION_TYPE_UNSPECIFIED = 0 - EXCLUSION_TYPE_EXCLUDE = 1 - - -class DlpJob(object): - class JobState(enum.IntEnum): - """ - Possible states of a job. New items may be added. - - Attributes: - JOB_STATE_UNSPECIFIED (int): Unused. - PENDING (int): The job has not yet started. - RUNNING (int): The job is currently running. Once a job has finished it will transition - to FAILED or DONE. - DONE (int): The job is no longer running. - CANCELED (int): The job was canceled before it could complete. - FAILED (int): The job had an error and did not complete. - ACTIVE (int): The job is currently accepting findings via hybridInspect. - A hybrid job in ACTIVE state may continue to have findings added to it - through calling of hybridInspect. After the job has finished no more - calls to hybridInspect may be made. ACTIVE jobs can transition to DONE. - """ - - JOB_STATE_UNSPECIFIED = 0 - PENDING = 1 - RUNNING = 2 - DONE = 3 - CANCELED = 4 - FAILED = 5 - ACTIVE = 6 - - -class JobTrigger(object): - class Status(enum.IntEnum): - """ - Whether the trigger is currently active. If PAUSED or CANCELLED, no jobs - will be created with this configuration. The service may automatically - pause triggers experiencing frequent errors. To restart a job, set the - status to HEALTHY after correcting user errors. 
- - Attributes: - STATUS_UNSPECIFIED (int): Unused. - HEALTHY (int): Trigger is healthy. - PAUSED (int): Trigger is temporarily paused. - CANCELLED (int): Trigger is cancelled and can not be resumed. - """ - - STATUS_UNSPECIFIED = 0 - HEALTHY = 1 - PAUSED = 2 - CANCELLED = 3 - - -class OutputStorageConfig(object): - class OutputSchema(enum.IntEnum): - """ - Predefined schemas for storing findings. - Only for use with external storage. - - Attributes: - OUTPUT_SCHEMA_UNSPECIFIED (int): Unused. - BASIC_COLUMNS (int): Basic schema including only ``info_type``, ``quote``, ``certainty``, - and ``timestamp``. - GCS_COLUMNS (int): Schema tailored to findings from scanning Google Cloud Storage. - DATASTORE_COLUMNS (int): Schema tailored to findings from scanning Google Datastore. - BIG_QUERY_COLUMNS (int): Schema tailored to findings from scanning Google BigQuery. - ALL_COLUMNS (int): Schema containing all columns. - """ - - OUTPUT_SCHEMA_UNSPECIFIED = 0 - BASIC_COLUMNS = 1 - GCS_COLUMNS = 2 - DATASTORE_COLUMNS = 3 - BIG_QUERY_COLUMNS = 4 - ALL_COLUMNS = 5 - - -class RecordCondition(object): - class Expressions(object): - class LogicalOperator(enum.IntEnum): - """ - Logical operators for conditional checks. - - Attributes: - LOGICAL_OPERATOR_UNSPECIFIED (int): Unused - AND (int): Conditional AND - """ - - LOGICAL_OPERATOR_UNSPECIFIED = 0 - AND = 1 - - -class TimePartConfig(object): - class TimePart(enum.IntEnum): - """ - Components that make up time. - - Attributes: - TIME_PART_UNSPECIFIED (int): Unused - YEAR (int): [0-9999] - MONTH (int): [1-12] - DAY_OF_MONTH (int): [1-31] - DAY_OF_WEEK (int): [1-7] - WEEK_OF_YEAR (int): [1-53] - HOUR_OF_DAY (int): [0-23] - """ - - TIME_PART_UNSPECIFIED = 0 - YEAR = 1 - MONTH = 2 - DAY_OF_MONTH = 3 - DAY_OF_WEEK = 4 - WEEK_OF_YEAR = 5 - HOUR_OF_DAY = 6 - - -class TransformationSummary(object): - class TransformationResultCode(enum.IntEnum): - """ - Possible outcomes of transformations. 
- - Attributes: - TRANSFORMATION_RESULT_CODE_UNSPECIFIED (int): Unused - SUCCESS (int): Transformation completed without an error. - ERROR (int): Transformation had an error. - """ - - TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0 - SUCCESS = 1 - ERROR = 2 diff --git a/google/cloud/dlp_v2/gapic/transports/__init__.py b/google/cloud/dlp_v2/gapic/transports/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/dlp_v2/gapic/transports/dlp_service_grpc_transport.py b/google/cloud/dlp_v2/gapic/transports/dlp_service_grpc_transport.py deleted file mode 100644 index 47a04aa6..00000000 --- a/google/cloud/dlp_v2/gapic/transports/dlp_service_grpc_transport.py +++ /dev/null @@ -1,627 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.dlp_v2.proto import dlp_pb2_grpc - - -class DlpServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.privacy.dlp.v2 DlpService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - def __init__( - self, channel=None, credentials=None, address="dlp.googleapis.com:443" - ): - """Instantiate the transport class. 
- - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. - self._stubs = {"dlp_service_stub": dlp_pb2_grpc.DlpServiceStub(channel)} - - @classmethod - def create_channel( - cls, address="dlp.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. 
- """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def inspect_content(self): - """Return the gRPC stub for :meth:`DlpServiceClient.inspect_content`. - - Finds potentially sensitive info in content. - This method has limits on input size, processing time, and output size. - - When no InfoTypes or CustomInfoTypes are specified in this request, the - system will automatically choose what detectors to run. By default this may - be all types, but may change over time as detectors are updated. - - For how to guides, see https://cloud.google.com/dlp/docs/inspecting-images - and https://cloud.google.com/dlp/docs/inspecting-text, - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].InspectContent - - @property - def redact_image(self): - """Return the gRPC stub for :meth:`DlpServiceClient.redact_image`. - - Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, and output size. - See https://cloud.google.com/dlp/docs/redacting-sensitive-data-images to - learn more. - - When no InfoTypes or CustomInfoTypes are specified in this request, the - system will automatically choose what detectors to run. By default this may - be all types, but may change over time as detectors are updated. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].RedactImage - - @property - def deidentify_content(self): - """Return the gRPC stub for :meth:`DlpServiceClient.deidentify_content`. 
- - De-identifies potentially sensitive info from a ContentItem. - This method has limits on input size and output size. - See https://cloud.google.com/dlp/docs/deidentify-sensitive-data to - learn more. - - When no InfoTypes or CustomInfoTypes are specified in this request, the - system will automatically choose what detectors to run. By default this may - be all types, but may change over time as detectors are updated. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].DeidentifyContent - - @property - def reidentify_content(self): - """Return the gRPC stub for :meth:`DlpServiceClient.reidentify_content`. - - Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].ReidentifyContent - - @property - def list_info_types(self): - """Return the gRPC stub for :meth:`DlpServiceClient.list_info_types`. - - Returns a list of the sensitive information types that the DLP API - supports. See https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].ListInfoTypes - - @property - def create_inspect_template(self): - """Return the gRPC stub for :meth:`DlpServiceClient.create_inspect_template`. - - Creates an InspectTemplate for re-using frequently used configuration - for inspecting content, images, and storage. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].CreateInspectTemplate - - @property - def update_inspect_template(self): - """Return the gRPC stub for :meth:`DlpServiceClient.update_inspect_template`. - - Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].UpdateInspectTemplate - - @property - def get_inspect_template(self): - """Return the gRPC stub for :meth:`DlpServiceClient.get_inspect_template`. - - Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].GetInspectTemplate - - @property - def list_inspect_templates(self): - """Return the gRPC stub for :meth:`DlpServiceClient.list_inspect_templates`. - - Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].ListInspectTemplates - - @property - def delete_inspect_template(self): - """Return the gRPC stub for :meth:`DlpServiceClient.delete_inspect_template`. - - Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["dlp_service_stub"].DeleteInspectTemplate - - @property - def create_deidentify_template(self): - """Return the gRPC stub for :meth:`DlpServiceClient.create_deidentify_template`. - - Creates a DeidentifyTemplate for re-using frequently used configuration - for de-identifying content, images, and storage. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].CreateDeidentifyTemplate - - @property - def update_deidentify_template(self): - """Return the gRPC stub for :meth:`DlpServiceClient.update_deidentify_template`. - - Updates the DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].UpdateDeidentifyTemplate - - @property - def get_deidentify_template(self): - """Return the gRPC stub for :meth:`DlpServiceClient.get_deidentify_template`. - - Gets a DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].GetDeidentifyTemplate - - @property - def list_deidentify_templates(self): - """Return the gRPC stub for :meth:`DlpServiceClient.list_deidentify_templates`. - - Lists DeidentifyTemplates. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. 
- """ - return self._stubs["dlp_service_stub"].ListDeidentifyTemplates - - @property - def delete_deidentify_template(self): - """Return the gRPC stub for :meth:`DlpServiceClient.delete_deidentify_template`. - - Deletes a DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].DeleteDeidentifyTemplate - - @property - def create_dlp_job(self): - """Return the gRPC stub for :meth:`DlpServiceClient.create_dlp_job`. - - Creates a new job to inspect storage or calculate risk metrics. - See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - - When no InfoTypes or CustomInfoTypes are specified in inspect jobs, the - system will automatically choose what detectors to run. By default this may - be all types, but may change over time as detectors are updated. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].CreateDlpJob - - @property - def list_dlp_jobs(self): - """Return the gRPC stub for :meth:`DlpServiceClient.list_dlp_jobs`. - - Lists DlpJobs that match the specified filter in the request. - See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].ListDlpJobs - - @property - def get_dlp_job(self): - """Return the gRPC stub for :meth:`DlpServiceClient.get_dlp_job`. - - Gets the latest state of a long-running DlpJob. 
- See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].GetDlpJob - - @property - def delete_dlp_job(self): - """Return the gRPC stub for :meth:`DlpServiceClient.delete_dlp_job`. - - Deletes a long-running DlpJob. This method indicates that the client is - no longer interested in the DlpJob result. The job will be cancelled if - possible. - See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].DeleteDlpJob - - @property - def cancel_dlp_job(self): - """Return the gRPC stub for :meth:`DlpServiceClient.cancel_dlp_job`. - - Starts asynchronous cancellation on a long-running DlpJob. The server - makes a best effort to cancel the DlpJob, but success is not - guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].CancelDlpJob - - @property - def finish_dlp_job(self): - """Return the gRPC stub for :meth:`DlpServiceClient.finish_dlp_job`. - - Finish a running hybrid DlpJob. Triggers the finalization steps and running - of any enabled actions that have not yet run. - Early access feature is in a pre-release state and might change or have - limited support. For more information, see - https://cloud.google.com/products#product-launch-stages. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].FinishDlpJob - - @property - def hybrid_inspect_dlp_job(self): - """Return the gRPC stub for :meth:`DlpServiceClient.hybrid_inspect_dlp_job`. - - Inspect hybrid content and store findings to a job. - To review the findings inspect the job. Inspection will occur - asynchronously. - Early access feature is in a pre-release state and might change or have - limited support. For more information, see - https://cloud.google.com/products#product-launch-stages. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].HybridInspectDlpJob - - @property - def list_job_triggers(self): - """Return the gRPC stub for :meth:`DlpServiceClient.list_job_triggers`. - - Lists job triggers. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].ListJobTriggers - - @property - def get_job_trigger(self): - """Return the gRPC stub for :meth:`DlpServiceClient.get_job_trigger`. - - Gets a job trigger. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].GetJobTrigger - - @property - def delete_job_trigger(self): - """Return the gRPC stub for :meth:`DlpServiceClient.delete_job_trigger`. - - Deletes a job trigger. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].DeleteJobTrigger - - @property - def hybrid_inspect_job_trigger(self): - """Return the gRPC stub for :meth:`DlpServiceClient.hybrid_inspect_job_trigger`. - - Inspect hybrid content and store findings to a trigger. The inspection - will be processed asynchronously. To review the findings monitor the - jobs within the trigger. - Early access feature is in a pre-release state and might change or have - limited support. For more information, see - https://cloud.google.com/products#product-launch-stages. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].HybridInspectJobTrigger - - @property - def update_job_trigger(self): - """Return the gRPC stub for :meth:`DlpServiceClient.update_job_trigger`. - - Updates a job trigger. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].UpdateJobTrigger - - @property - def create_job_trigger(self): - """Return the gRPC stub for :meth:`DlpServiceClient.create_job_trigger`. - - Creates a job trigger to run DLP actions such as scanning storage for - sensitive information on a set schedule. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].CreateJobTrigger - - @property - def create_stored_info_type(self): - """Return the gRPC stub for :meth:`DlpServiceClient.create_stored_info_type`. 
- - Creates a pre-built stored infoType to be used for inspection. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].CreateStoredInfoType - - @property - def update_stored_info_type(self): - """Return the gRPC stub for :meth:`DlpServiceClient.update_stored_info_type`. - - Updates the stored infoType by creating a new version. The existing version - will continue to be used until the new version is ready. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].UpdateStoredInfoType - - @property - def get_stored_info_type(self): - """Return the gRPC stub for :meth:`DlpServiceClient.get_stored_info_type`. - - Gets a stored infoType. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].GetStoredInfoType - - @property - def list_stored_info_types(self): - """Return the gRPC stub for :meth:`DlpServiceClient.list_stored_info_types`. - - Lists stored infoTypes. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].ListStoredInfoTypes - - @property - def delete_stored_info_type(self): - """Return the gRPC stub for :meth:`DlpServiceClient.delete_stored_info_type`. - - Deletes a stored infoType. 
- See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dlp_service_stub"].DeleteStoredInfoType diff --git a/google/cloud/dlp_v2/proto/__init__.py b/google/cloud/dlp_v2/proto/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/google/cloud/dlp_v2/proto/dlp.proto b/google/cloud/dlp_v2/proto/dlp.proto index 29291fa5..781ac1c4 100644 --- a/google/cloud/dlp_v2/proto/dlp.proto +++ b/google/cloud/dlp_v2/proto/dlp.proto @@ -16,7 +16,6 @@ syntax = "proto3"; package google.privacy.dlp.v2; -import "google/api/annotations.proto"; import "google/api/client.proto"; import "google/api/field_behavior.proto"; import "google/api/resource.proto"; @@ -29,6 +28,7 @@ import "google/rpc/status.proto"; import "google/type/date.proto"; import "google/type/dayofweek.proto"; import "google/type/timeofday.proto"; +import "google/api/annotations.proto"; option csharp_namespace = "Google.Cloud.Dlp.V2"; option go_package = "google.golang.org/genproto/googleapis/privacy/dlp/v2;dlp"; @@ -37,6 +37,11 @@ option java_outer_classname = "DlpProto"; option java_package = "com.google.privacy.dlp.v2"; option php_namespace = "Google\\Cloud\\Dlp\\V2"; option ruby_package = "Google::Cloud::Dlp::V2"; +option (google.api.resource_definition) = { + type: "dlp.googleapis.com/DlpContent" + pattern: "projects/{project}/dlpContent" + pattern: "projects/{project}/locations/{location}/dlpContent" +}; option (google.api.resource_definition) = { type: "dlp.googleapis.com/OrganizationLocation" pattern: "organizations/{organization}/locations/{location}" @@ -808,6 +813,12 @@ message ByteContentItem { // avro AVRO = 11; + + // csv + CSV = 12; + + // tsv + TSV = 13; } // The type of data stored in the bytes string. Default will be TEXT_UTF8. 
@@ -873,8 +884,8 @@ message Finding { }; // Resource name in format - // projects/{project}/locations/{location}/findings/{finding} - // Populated only when viewing persisted findings. + // projects/{project}/locations/{location}/findings/{finding} Populated only + // when viewing persisted findings. string name = 14; // The content that was found. Even if the content is not textual, it @@ -1143,11 +1154,12 @@ message RedactImageRequest { Color redaction_color = 3; } - // The parent resource name, for example projects/my-project-id - // or projects/my-project-id/locations/{location_id}. + // The parent resource name. + // - Format:projects/[PROJECT-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] string parent = 1 [(google.api.resource_reference) = { - type: "cloudresourcemanager.googleapis.com/Project" - }]; + child_type: "dlp.googleapis.com/DlpContent" + }]; // Deprecated. This field has no effect. string location_id = 8; @@ -1194,11 +1206,12 @@ message RedactImageResponse { // Request to de-identify a list of items. message DeidentifyContentRequest { - // The parent resource name, for example projects/my-project-id - // or projects/my-project-id/locations/{location_id}. + // Parent resource name. + // - Format:projects/[PROJECT-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] string parent = 1 [(google.api.resource_reference) = { - type: "cloudresourcemanager.googleapis.com/Project" - }]; + child_type: "dlp.googleapis.com/DlpContent" + }]; // Configuration for the de-identification of the content item. // Items specified here will override the template referenced by the @@ -1243,10 +1256,12 @@ message DeidentifyContentResponse { // Request to re-identify an item. message ReidentifyContentRequest { // Required. The parent resource name. 
+ // - Format:projects/[PROJECT-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - type: "cloudresourcemanager.googleapis.com/Project" + child_type: "dlp.googleapis.com/DlpContent" } ]; @@ -1298,11 +1313,12 @@ message ReidentifyContentResponse { // Request to search for potentially sensitive info in a ContentItem. message InspectContentRequest { - // The parent resource name, for example projects/my-project-id - // or projects/my-project-id/locations/{location_id} + // Parent resource name. + // - Format:projects/[PROJECT-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] string parent = 1 [(google.api.resource_reference) = { - type: "cloudresourcemanager.googleapis.com/Project" - }]; + child_type: "dlp.googleapis.com/DlpContent" + }]; // Configuration for the inspector. What specified here will override // the template referenced by the inspect_template_name argument. @@ -1432,7 +1448,7 @@ message InspectDataSourceDetails { Result result = 3; } -// Statistics related to processing hybrid inspect requests.s +// Statistics related to processing hybrid inspect requests. message HybridInspectStatistics { // The number of hybrid inspection requests processed within this job. int64 processed_count = 1; @@ -1467,8 +1483,10 @@ message InfoTypeDescription { // Request for the list of infoTypes. message ListInfoTypesRequest { - // The parent resource name, for example locations/{location_id} + // The parent resource name. + // - Format:locations/[LOCATION-ID] string parent = 4; + // BCP-47 language code for localized infoType friendly // names. If omitted, or if localized strings are not available, // en-US strings will be returned. @@ -1553,11 +1571,10 @@ message StatisticalTable { BigQueryTable table = 3 [(google.api.field_behavior) = REQUIRED]; // Required. Quasi-identifier columns. 
- repeated QuasiIdentifierField quasi_ids = 1 - [(google.api.field_behavior) = REQUIRED]; + repeated QuasiIdentifierField quasi_ids = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The relative frequency column must contain a floating-point - // number between 0 and 1 (inclusive). Null values are assumed to be zero. + // Required. The relative frequency column must contain a floating-point number + // between 0 and 1 (inclusive). Null values are assumed to be zero. FieldId relative_frequency = 2 [(google.api.field_behavior) = REQUIRED]; } @@ -1670,16 +1687,15 @@ message PrivacyMetric { BigQueryTable table = 3 [(google.api.field_behavior) = REQUIRED]; // Required. Quasi-identifier columns. - repeated QuasiIdField quasi_ids = 1 - [(google.api.field_behavior) = REQUIRED]; + repeated QuasiIdField quasi_ids = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. The relative frequency column must contain a floating-point - // number between 0 and 1 (inclusive). Null values are assumed to be zero. + // Required. The relative frequency column must contain a floating-point number + // between 0 and 1 (inclusive). Null values are assumed to be zero. FieldId relative_frequency = 2 [(google.api.field_behavior) = REQUIRED]; } - // Required. Fields considered to be quasi-identifiers. No two columns can - // have the same tag. + // Required. Fields considered to be quasi-identifiers. No two columns can have the + // same tag. repeated TaggedField quasi_ids = 1 [(google.api.field_behavior) = REQUIRED]; // ISO 3166-1 alpha-2 region code to use in the statistical modeling. @@ -1698,8 +1714,8 @@ message PrivacyMetric { // Similarly to the k-map metric, we cannot compute δ-presence exactly without // knowing the attack dataset, so we use a statistical model instead. message DeltaPresenceEstimationConfig { - // Required. Fields considered to be quasi-identifiers. No two fields can - // have the same tag. + // Required. Fields considered to be quasi-identifiers. 
No two fields can have the + // same tag. repeated QuasiId quasi_ids = 1 [(google.api.field_behavior) = REQUIRED]; // ISO 3166-1 alpha-2 region code to use in the statistical modeling. @@ -1772,8 +1788,7 @@ message AnalyzeDataSourceRiskDetails { } // Histogram of value frequencies in the column. - repeated CategoricalStatsHistogramBucket value_frequency_histogram_buckets = - 5; + repeated CategoricalStatsHistogramBucket value_frequency_histogram_buckets = 5; } // Result of the k-anonymity computation. @@ -1853,8 +1868,7 @@ message AnalyzeDataSourceRiskDetails { } // Histogram of l-diversity equivalence class sensitive value frequencies. - repeated LDiversityHistogramBucket - sensitive_value_frequency_histogram_buckets = 5; + repeated LDiversityHistogramBucket sensitive_value_frequency_histogram_buckets = 5; } // Result of the reidentifiability analysis. Note that these results are an @@ -2268,12 +2282,16 @@ message ReplaceValueConfig { } // Replace each matching finding with the name of the info_type. -message ReplaceWithInfoTypeConfig {} +message ReplaceWithInfoTypeConfig { + +} // Redact a given value. For example, if used with an `InfoTypeTransformation` // transforming PHONE_NUMBER, and input 'My phone number is 206-555-0123', the // output would be 'My phone number is '. -message RedactConfig {} +message RedactConfig { + +} // Characters to skip when doing deidentification of a value. These will be left // alone and skipped. @@ -2357,18 +2375,18 @@ message CharacterMaskConfig { // // See https://cloud.google.com/dlp/docs/concepts-bucketing to learn more. message FixedSizeBucketingConfig { - // Required. Lower bound value of buckets. All values less than `lower_bound` - // are grouped together into a single bucket; for example if `lower_bound` = - // 10, then all values less than 10 are replaced with the value “-10”. + // Required. Lower bound value of buckets. 
All values less than `lower_bound` are + // grouped together into a single bucket; for example if `lower_bound` = 10, + // then all values less than 10 are replaced with the value "-10". Value lower_bound = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. Upper bound value of buckets. All values greater than upper_bound - // are grouped together into a single bucket; for example if `upper_bound` = - // 89, then all values greater than 89 are replaced with the value “89+”. + // Required. Upper bound value of buckets. All values greater than upper_bound are + // grouped together into a single bucket; for example if `upper_bound` = 89, + // then all values greater than 89 are replaced with the value "89+". Value upper_bound = 2 [(google.api.field_behavior) = REQUIRED]; - // Required. Size of each bucket (except for minimum and maximum buckets). So - // if `lower_bound` = 10, `upper_bound` = 89, and `bucket_size` = 10, then the + // Required. Size of each bucket (except for minimum and maximum buckets). So if + // `lower_bound` = 10, `upper_bound` = 89, and `bucket_size` = 10, then the // following buckets would be used: -10, 10-20, 20-30, 30-40, 40-50, 50-60, // 60-70, 70-80, 80-89, 89+. Precision up to 2 decimals works. double bucket_size = 3 [(google.api.field_behavior) = REQUIRED]; @@ -2393,9 +2411,8 @@ message BucketingConfig { // Upper bound of the range, exclusive; type must match min. Value max = 2; - // Replacement value for this bucket. If not provided - // the default behavior will be to hyphenate the min-max range. - Value replacement_value = 3; + // Required. Replacement value for this bucket. + Value replacement_value = 3 [(google.api.field_behavior) = REQUIRED]; } // Set of buckets. Ranges must be non-overlapping. @@ -2423,16 +2440,16 @@ message CryptoReplaceFfxFpeConfig { // Unused. 
FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0; - // [0-9] (radix of 10) + // `[0-9]` (radix of 10) NUMERIC = 1; - // [0-9A-F] (radix of 16) + // `[0-9A-F]` (radix of 16) HEXADECIMAL = 2; - // [0-9A-Z] (radix of 36) + // `[0-9A-Z]` (radix of 36) UPPER_CASE_ALPHA_NUMERIC = 3; - // [0-9A-Za-z] (radix of 62) + // `[0-9A-Za-z]` (radix of 62) ALPHA_NUMERIC = 4; } @@ -2491,7 +2508,7 @@ message CryptoReplaceFfxFpeConfig { // // This annotation identifies the surrogate when inspecting content using the // custom infoType - // [`SurrogateType`](/dlp/docs/reference/rest/v2/InspectConfig#surrogatetype). + // [`SurrogateType`](https://cloud.google.com/dlp/docs/reference/rest/v2/InspectConfig#surrogatetype). // This facilitates reversal of the surrogate when it occurs in free text. // // In order for inspection to work properly, the name of this infoType must @@ -2562,15 +2579,14 @@ message KmsWrappedCryptoKey { // same context. See https://cloud.google.com/dlp/docs/concepts-date-shifting // to learn more. message DateShiftConfig { - // Required. Range of shift in days. Actual shift will be selected at random - // within this range (inclusive ends). Negative means shift to earlier in - // time. Must not be more than 365250 days (1000 years) each direction. + // Required. Range of shift in days. Actual shift will be selected at random within this + // range (inclusive ends). Negative means shift to earlier in time. Must not + // be more than 365250 days (1000 years) each direction. // // For example, 3 means shift date to at most 3 days into the future. int32 upper_bound_days = 1 [(google.api.field_behavior) = REQUIRED]; - // Required. For example, -5 means shift date to at most 5 days back in the - // past. + // Required. For example, -5 means shift date to at most 5 days back in the past. int32 lower_bound_days = 2 [(google.api.field_behavior) = REQUIRED]; // Points to the field that contains the context, for example, an entity id. 
@@ -2982,7 +2998,7 @@ message Action { // Publish a message into given Pub/Sub topic when DlpJob has completed. The // message contains a single field, `DlpJobName`, which is equal to the // finished job's - // [`DlpJob.name`](/dlp/docs/reference/rest/v2/projects.dlpJobs#DlpJob). + // [`DlpJob.name`](https://cloud.google.com/dlp/docs/reference/rest/v2/projects.dlpJobs#DlpJob). // Compatible with: Inspect, Risk message PublishToPubSub { // Cloud Pub/Sub topic to send notifications to. The topic must have given @@ -3058,8 +3074,11 @@ message Action { // Request message for CreateInspectTemplate. message CreateInspectTemplateRequest { - // Required. The parent resource name, for example projects/my-project-id or - // organizations/my-org-id or projects/my-project-id/locations/{location-id}. + // Required. Parent resource name. + // - Format:projects/[PROJECT-ID] + // - Format:organizations/[ORGANIZATION-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3082,8 +3101,8 @@ message CreateInspectTemplateRequest { // Request message for UpdateInspectTemplate. message UpdateInspectTemplateRequest { - // Required. Resource name of organization and inspectTemplate to be updated, - // for example `organizations/433245324/inspectTemplates/432452342` or + // Required. Resource name of organization and inspectTemplate to be updated, for + // example `organizations/433245324/inspectTemplates/432452342` or // projects/project-id/inspectTemplates/432452342. string name = 1 [ (google.api.field_behavior) = REQUIRED, @@ -3101,8 +3120,8 @@ message UpdateInspectTemplateRequest { // Request message for GetInspectTemplate. message GetInspectTemplateRequest { - // Required. 
Resource name of the organization and inspectTemplate to be read, - // for example `organizations/433245324/inspectTemplates/432452342` or + // Required. Resource name of the organization and inspectTemplate to be read, for + // example `organizations/433245324/inspectTemplates/432452342` or // projects/project-id/inspectTemplates/432452342. string name = 1 [ (google.api.field_behavior) = REQUIRED, @@ -3114,8 +3133,11 @@ message GetInspectTemplateRequest { // Request message for ListInspectTemplates. message ListInspectTemplatesRequest { - // Required. The parent resource name, for example projects/my-project-id or - // organizations/my-org-id or projects/my-project-id/locations/{location_id}. + // Required. Parent resource name. + // - Format:projects/[PROJECT-ID] + // - Format:organizations/[ORGANIZATION-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3162,9 +3184,9 @@ message ListInspectTemplatesResponse { // Request message for DeleteInspectTemplate. message DeleteInspectTemplateRequest { - // Required. Resource name of the organization and inspectTemplate to be - // deleted, for example `organizations/433245324/inspectTemplates/432452342` - // or projects/project-id/inspectTemplates/432452342. + // Required. Resource name of the organization and inspectTemplate to be deleted, for + // example `organizations/433245324/inspectTemplates/432452342` or + // projects/project-id/inspectTemplates/432452342. string name = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3175,12 +3197,13 @@ message DeleteInspectTemplateRequest { // Request message for CreateJobTrigger. message CreateJobTriggerRequest { - // Required. The parent resource name, for example projects/my-project-id - // or projects/my-project-id/locations/{location_id}. + // Required. 
Parent resource name. + // - Format:projects/[PROJECT-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - type: "cloudresourcemanager.googleapis.com/Project" + child_type: "dlp.googleapis.com/JobTrigger" } ]; @@ -3203,7 +3226,9 @@ message ActivateJobTriggerRequest { // `projects/dlp-test-project/jobTriggers/53234423`. string name = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { type: "dlp.googleapis.com/JobTrigger" } + (google.api.resource_reference) = { + type: "dlp.googleapis.com/JobTrigger" + } ]; } @@ -3213,7 +3238,9 @@ message UpdateJobTriggerRequest { // `projects/dlp-test-project/jobTriggers/53234423`. string name = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { type: "dlp.googleapis.com/JobTrigger" } + (google.api.resource_reference) = { + type: "dlp.googleapis.com/JobTrigger" + } ]; // New JobTrigger value. @@ -3229,7 +3256,9 @@ message GetJobTriggerRequest { // `projects/dlp-test-project/jobTriggers/53234423`. string name = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { type: "dlp.googleapis.com/JobTrigger" } + (google.api.resource_reference) = { + type: "dlp.googleapis.com/JobTrigger" + } ]; } @@ -3237,12 +3266,13 @@ message GetJobTriggerRequest { // jobs such as calculating risk metrics or inspecting Google Cloud // Storage. message CreateDlpJobRequest { - // Required. The parent resource name, for example projects/my-project-id - // or projects/my-project-id/locations/{location_id}. + // Required. Parent resource name. 
+ // - Format:projects/[PROJECT-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - type: "cloudresourcemanager.googleapis.com/Project" + child_type: "dlp.googleapis.com/DlpJob" } ]; @@ -3267,12 +3297,13 @@ message CreateDlpJobRequest { // Request message for ListJobTriggers. message ListJobTriggersRequest { - // Required. The parent resource name, for example `projects/my-project-id` - // or projects/my-project-id/locations/{location_id}. + // Required. Parent resource name. + // - Format:projects/[PROJECT-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - type: "cloudresourcemanager.googleapis.com/Project" + child_type: "dlp.googleapis.com/JobTrigger" } ]; @@ -3347,7 +3378,9 @@ message DeleteJobTriggerRequest { // `projects/dlp-test-project/jobTriggers/53234423`. string name = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { type: "dlp.googleapis.com/JobTrigger" } + (google.api.resource_reference) = { + type: "dlp.googleapis.com/JobTrigger" + } ]; } @@ -3443,18 +3476,21 @@ message GetDlpJobRequest { // Required. The name of the DlpJob resource. string name = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { type: "dlp.googleapis.com/DlpJob" } + (google.api.resource_reference) = { + type: "dlp.googleapis.com/DlpJob" + } ]; } // The request message for listing DLP jobs. message ListDlpJobsRequest { - // Required. The parent resource name, for example projects/my-project-id - // or projects/my-project-id/locations/{location_id}. + // Required. Parent resource name. 
+ // - Format:projects/[PROJECT-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] string parent = 4 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { - type: "cloudresourcemanager.googleapis.com/Project" + child_type: "dlp.googleapis.com/DlpJob" } ]; @@ -3530,7 +3566,9 @@ message CancelDlpJobRequest { // Required. The name of the DlpJob resource to be cancelled. string name = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { type: "dlp.googleapis.com/DlpJob" } + (google.api.resource_reference) = { + type: "dlp.googleapis.com/DlpJob" + } ]; } @@ -3539,7 +3577,9 @@ message FinishDlpJobRequest { // Required. The name of the DlpJob resource to be cancelled. string name = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { type: "dlp.googleapis.com/DlpJob" } + (google.api.resource_reference) = { + type: "dlp.googleapis.com/DlpJob" + } ]; } @@ -3548,14 +3588,19 @@ message DeleteDlpJobRequest { // Required. The name of the DlpJob resource to be deleted. string name = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { type: "dlp.googleapis.com/DlpJob" } + (google.api.resource_reference) = { + type: "dlp.googleapis.com/DlpJob" + } ]; } // Request message for CreateDeidentifyTemplate. message CreateDeidentifyTemplateRequest { - // Required. The parent resource name, for example projects/my-project-id or - // organizations/my-org-id or projects/my-project-id/locations/{location_id}. + // Required. Parent resource name. + // - Format:projects/[PROJECT-ID] + // - Format:organizations/[ORGANIZATION-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3564,8 +3609,7 @@ message CreateDeidentifyTemplateRequest { ]; // Required. The DeidentifyTemplate to create. 
- DeidentifyTemplate deidentify_template = 2 - [(google.api.field_behavior) = REQUIRED]; + DeidentifyTemplate deidentify_template = 2 [(google.api.field_behavior) = REQUIRED]; // The template id can contain uppercase and lowercase letters, // numbers, and hyphens; that is, it must match the regular @@ -3579,9 +3623,8 @@ message CreateDeidentifyTemplateRequest { // Request message for UpdateDeidentifyTemplate. message UpdateDeidentifyTemplateRequest { - // Required. Resource name of organization and deidentify template to be - // updated, for example - // `organizations/433245324/deidentifyTemplates/432452342` or + // Required. Resource name of organization and deidentify template to be updated, for + // example `organizations/433245324/deidentifyTemplates/432452342` or // projects/project-id/deidentifyTemplates/432452342. string name = 1 [ (google.api.field_behavior) = REQUIRED, @@ -3599,9 +3642,9 @@ message UpdateDeidentifyTemplateRequest { // Request message for GetDeidentifyTemplate. message GetDeidentifyTemplateRequest { - // Required. Resource name of the organization and deidentify template to be - // read, for example `organizations/433245324/deidentifyTemplates/432452342` - // or projects/project-id/deidentifyTemplates/432452342. + // Required. Resource name of the organization and deidentify template to be read, for + // example `organizations/433245324/deidentifyTemplates/432452342` or + // projects/project-id/deidentifyTemplates/432452342. string name = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3612,8 +3655,11 @@ message GetDeidentifyTemplateRequest { // Request message for ListDeidentifyTemplates. message ListDeidentifyTemplatesRequest { - // Required. The parent resource name, for example projects/my-project-id or - // organizations/my-org-id or projects/my-project-id/locations/{location_id}. + // Required. Parent resource name. 
+ // - Format:projects/[PROJECT-ID] + // - Format:organizations/[ORGANIZATION-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3661,9 +3707,8 @@ message ListDeidentifyTemplatesResponse { // Request message for DeleteDeidentifyTemplate. message DeleteDeidentifyTemplateRequest { - // Required. Resource name of the organization and deidentify template to be - // deleted, for example - // `organizations/433245324/deidentifyTemplates/432452342` or + // Required. Resource name of the organization and deidentify template to be deleted, + // for example `organizations/433245324/deidentifyTemplates/432452342` or // projects/project-id/deidentifyTemplates/432452342. string name = 1 [ (google.api.field_behavior) = REQUIRED, @@ -3790,8 +3835,11 @@ message StoredInfoType { // Request message for CreateStoredInfoType. message CreateStoredInfoTypeRequest { - // Required. The parent resource name, for example projects/my-project-id or - // organizations/my-org-id or projects/my-project-id/locations/{location_id} + // Required. Parent resource name. + // - Format:projects/[PROJECT-ID] + // - Format:organizations/[ORGANIZATION-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3835,8 +3883,8 @@ message UpdateStoredInfoTypeRequest { // Request message for GetStoredInfoType. message GetStoredInfoTypeRequest { - // Required. Resource name of the organization and storedInfoType to be read, - // for example `organizations/433245324/storedInfoTypes/432452342` or + // Required. 
Resource name of the organization and storedInfoType to be read, for + // example `organizations/433245324/storedInfoTypes/432452342` or // projects/project-id/storedInfoTypes/432452342. string name = 1 [ (google.api.field_behavior) = REQUIRED, @@ -3848,8 +3896,11 @@ message GetStoredInfoTypeRequest { // Request message for ListStoredInfoTypes. message ListStoredInfoTypesRequest { - // Required. The parent resource name, for example projects/my-project-id or - // organizations/my-org-id or projects/my-project-id/locations/{location_id}. + // Required. Parent resource name. + // - Format:projects/[PROJECT-ID] + // - Format:organizations/[ORGANIZATION-ID] + // - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + // - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] string parent = 1 [ (google.api.field_behavior) = REQUIRED, (google.api.resource_reference) = { @@ -3897,8 +3948,8 @@ message ListStoredInfoTypesResponse { // Request message for DeleteStoredInfoType. message DeleteStoredInfoTypeRequest { - // Required. Resource name of the organization and storedInfoType to be - // deleted, for example `organizations/433245324/storedInfoTypes/432452342` or + // Required. Resource name of the organization and storedInfoType to be deleted, for + // example `organizations/433245324/storedInfoTypes/432452342` or // projects/project-id/storedInfoTypes/432452342. string name = 1 [ (google.api.field_behavior) = REQUIRED, @@ -3910,11 +3961,13 @@ message DeleteStoredInfoTypeRequest { // Request to search for potentially sensitive info in a custom location. message HybridInspectJobTriggerRequest { - // Required. Resource name of the trigger to execute a hybrid inspect on, for - // example `projects/dlp-test-project/jobTriggers/53234423`. + // Required. Resource name of the trigger to execute a hybrid inspect on, for example + // `projects/dlp-test-project/jobTriggers/53234423`. 
string name = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { type: "dlp.googleapis.com/JobTrigger" } + (google.api.resource_reference) = { + type: "dlp.googleapis.com/JobTrigger" + } ]; // The item to inspect. @@ -3923,11 +3976,13 @@ message HybridInspectJobTriggerRequest { // Request to search for potentially sensitive info in a custom location. message HybridInspectDlpJobRequest { - // Required. Resource name of the job to execute a hybrid inspect on, for - // example `projects/dlp-test-project/dlpJob/53234423`. + // Required. Resource name of the job to execute a hybrid inspect on, for example + // `projects/dlp-test-project/dlpJob/53234423`. string name = 1 [ (google.api.field_behavior) = REQUIRED, - (google.api.resource_reference) = { type: "dlp.googleapis.com/DlpJob" } + (google.api.resource_reference) = { + type: "dlp.googleapis.com/DlpJob" + } ]; // The item to inspect. @@ -4064,7 +4119,7 @@ enum MetadataType { // Unused METADATATYPE_UNSPECIFIED = 0; - // General file metadata provided by GCS. + // General file metadata provided by Cloud Storage. STORAGE_METADATA = 2; } diff --git a/google/cloud/dlp_v2/proto/dlp_pb2.py b/google/cloud/dlp_v2/proto/dlp_pb2.py deleted file mode 100644 index e4080ee6..00000000 --- a/google/cloud/dlp_v2/proto/dlp_pb2.py +++ /dev/null @@ -1,19574 +0,0 @@ -# -*- coding: utf-8 -*- -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/dlp_v2/proto/dlp.proto - -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.dlp_v2.proto import ( - storage_pb2 as google_dot_cloud_dot_dlp__v2_dot_proto_dot_storage__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 -from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2 -from google.type import date_pb2 as google_dot_type_dot_date__pb2 -from google.type import dayofweek_pb2 as google_dot_type_dot_dayofweek__pb2 -from google.type import timeofday_pb2 as google_dot_type_dot_timeofday__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dlp_v2/proto/dlp.proto", - package="google.privacy.dlp.v2", - syntax="proto3", - serialized_options=b"\n\031com.google.privacy.dlp.v2B\010DlpProtoP\001Z8google.golang.org/genproto/googleapis/privacy/dlp/v2;dlp\252\002\023Google.Cloud.Dlp.V2\312\002\023Google\\Cloud\\Dlp\\V2\352\002\026Google::Cloud::Dlp::V2\352A\\\n'dlp.googleapis.com/OrganizationLocation\0221organizations/{organization}/locations/{location}", - 
serialized_pb=b'\n#google/cloud/dlp_v2/proto/dlp.proto\x12\x15google.privacy.dlp.v2\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\'google/cloud/dlp_v2/proto/storage.proto\x1a\x1egoogle/protobuf/duration.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x17google/rpc/status.proto\x1a\x16google/type/date.proto\x1a\x1bgoogle/type/dayofweek.proto\x1a\x1bgoogle/type/timeofday.proto"G\n\x10\x45xcludeInfoTypes\x12\x33\n\ninfo_types\x18\x01 \x03(\x0b\x32\x1f.google.privacy.dlp.v2.InfoType"\xa0\x02\n\rExclusionRule\x12\x46\n\ndictionary\x18\x01 \x01(\x0b\x32\x30.google.privacy.dlp.v2.CustomInfoType.DictionaryH\x00\x12<\n\x05regex\x18\x02 \x01(\x0b\x32+.google.privacy.dlp.v2.CustomInfoType.RegexH\x00\x12\x45\n\x12\x65xclude_info_types\x18\x03 \x01(\x0b\x32\'.google.privacy.dlp.v2.ExcludeInfoTypesH\x00\x12:\n\rmatching_type\x18\x04 \x01(\x0e\x32#.google.privacy.dlp.v2.MatchingTypeB\x06\n\x04type"\xb1\x01\n\x0eInspectionRule\x12W\n\x0chotword_rule\x18\x01 \x01(\x0b\x32?.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRuleH\x00\x12>\n\x0e\x65xclusion_rule\x18\x02 \x01(\x0b\x32$.google.privacy.dlp.v2.ExclusionRuleH\x00\x42\x06\n\x04type"~\n\x11InspectionRuleSet\x12\x33\n\ninfo_types\x18\x01 \x03(\x0b\x32\x1f.google.privacy.dlp.v2.InfoType\x12\x34\n\x05rules\x18\x02 \x03(\x0b\x32%.google.privacy.dlp.v2.InspectionRule"\xc7\x05\n\rInspectConfig\x12\x33\n\ninfo_types\x18\x01 \x03(\x0b\x32\x1f.google.privacy.dlp.v2.InfoType\x12\x39\n\x0emin_likelihood\x18\x02 \x01(\x0e\x32!.google.privacy.dlp.v2.Likelihood\x12\x42\n\x06limits\x18\x03 \x01(\x0b\x32\x32.google.privacy.dlp.v2.InspectConfig.FindingLimits\x12\x15\n\rinclude_quote\x18\x04 \x01(\x08\x12\x1a\n\x12\x65xclude_info_types\x18\x05 \x01(\x08\x12@\n\x11\x63ustom_info_types\x18\x06 
\x03(\x0b\x32%.google.privacy.dlp.v2.CustomInfoType\x12=\n\x0f\x63ontent_options\x18\x08 \x03(\x0e\x32$.google.privacy.dlp.v2.ContentOption\x12:\n\x08rule_set\x18\n \x03(\x0b\x32(.google.privacy.dlp.v2.InspectionRuleSet\x1a\x91\x02\n\rFindingLimits\x12\x1d\n\x15max_findings_per_item\x18\x01 \x01(\x05\x12 \n\x18max_findings_per_request\x18\x02 \x01(\x05\x12\x64\n\x1amax_findings_per_info_type\x18\x03 \x03(\x0b\x32@.google.privacy.dlp.v2.InspectConfig.FindingLimits.InfoTypeLimit\x1aY\n\rInfoTypeLimit\x12\x32\n\tinfo_type\x18\x01 \x01(\x0b\x32\x1f.google.privacy.dlp.v2.InfoType\x12\x14\n\x0cmax_findings\x18\x02 \x01(\x05"\x86\x02\n\x0f\x42yteContentItem\x12>\n\x04type\x18\x01 \x01(\x0e\x32\x30.google.privacy.dlp.v2.ByteContentItem.BytesType\x12\x0c\n\x04\x64\x61ta\x18\x02 \x01(\x0c"\xa4\x01\n\tBytesType\x12\x1a\n\x16\x42YTES_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05IMAGE\x10\x06\x12\x0e\n\nIMAGE_JPEG\x10\x01\x12\r\n\tIMAGE_BMP\x10\x02\x12\r\n\tIMAGE_PNG\x10\x03\x12\r\n\tIMAGE_SVG\x10\x04\x12\r\n\tTEXT_UTF8\x10\x05\x12\x11\n\rWORD_DOCUMENT\x10\x07\x12\x07\n\x03PDF\x10\x08\x12\x08\n\x04\x41VRO\x10\x0b"\x97\x01\n\x0b\x43ontentItem\x12\x0f\n\x05value\x18\x03 \x01(\tH\x00\x12-\n\x05table\x18\x04 \x01(\x0b\x32\x1c.google.privacy.dlp.v2.TableH\x00\x12;\n\tbyte_item\x18\x05 \x01(\x0b\x32&.google.privacy.dlp.v2.ByteContentItemH\x00\x42\x0b\n\tdata_item"\x9d\x01\n\x05Table\x12/\n\x07headers\x18\x01 \x03(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x12.\n\x04rows\x18\x02 \x03(\x0b\x32 .google.privacy.dlp.v2.Table.Row\x1a\x33\n\x03Row\x12,\n\x06values\x18\x01 \x03(\x0b\x32\x1c.google.privacy.dlp.v2.Value"]\n\rInspectResult\x12\x30\n\x08\x66indings\x18\x01 \x03(\x0b\x32\x1e.google.privacy.dlp.v2.Finding\x12\x1a\n\x12\x66indings_truncated\x18\x02 \x01(\x08"\xcb\x05\n\x07\x46inding\x12\x0c\n\x04name\x18\x0e \x01(\t\x12\r\n\x05quote\x18\x01 \x01(\t\x12\x32\n\tinfo_type\x18\x02 \x01(\x0b\x32\x1f.google.privacy.dlp.v2.InfoType\x12\x35\n\nlikelihood\x18\x03 
\x01(\x0e\x32!.google.privacy.dlp.v2.Likelihood\x12\x31\n\x08location\x18\x04 \x01(\x0b\x32\x1f.google.privacy.dlp.v2.Location\x12/\n\x0b\x63reate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x34\n\nquote_info\x18\x07 \x01(\x0b\x32 .google.privacy.dlp.v2.QuoteInfo\x12\x35\n\rresource_name\x18\x08 \x01(\tB\x1e\xfa\x41\x1b\n\x19\x64lp.googleapis.com/DlpJob\x12\x38\n\x0ctrigger_name\x18\t \x01(\tB"\xfa\x41\x1f\n\x1d\x64lp.googleapis.com/JobTrigger\x12:\n\x06labels\x18\n \x03(\x0b\x32*.google.privacy.dlp.v2.Finding.LabelsEntry\x12\x33\n\x0fjob_create_time\x18\x0b \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x30\n\x08job_name\x18\r \x01(\tB\x1e\xfa\x41\x1b\n\x19\x64lp.googleapis.com/DlpJob\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01:[\xea\x41X\n\x1a\x64lp.googleapis.com/Finding\x12:projects/{project}/locations/{location}/findings/{finding}"\xeb\x01\n\x08Location\x12\x30\n\nbyte_range\x18\x01 \x01(\x0b\x32\x1c.google.privacy.dlp.v2.Range\x12\x35\n\x0f\x63odepoint_range\x18\x02 \x01(\x0b\x32\x1c.google.privacy.dlp.v2.Range\x12\x41\n\x11\x63ontent_locations\x18\x07 \x03(\x0b\x32&.google.privacy.dlp.v2.ContentLocation\x12\x33\n\tcontainer\x18\x08 \x01(\x0b\x32 .google.privacy.dlp.v2.Container"\x97\x03\n\x0f\x43ontentLocation\x12\x16\n\x0e\x63ontainer_name\x18\x01 \x01(\t\x12@\n\x0frecord_location\x18\x02 \x01(\x0b\x32%.google.privacy.dlp.v2.RecordLocationH\x00\x12>\n\x0eimage_location\x18\x03 \x01(\x0b\x32$.google.privacy.dlp.v2.ImageLocationH\x00\x12\x44\n\x11\x64ocument_location\x18\x05 \x01(\x0b\x32\'.google.privacy.dlp.v2.DocumentLocationH\x00\x12\x44\n\x11metadata_location\x18\x08 \x01(\x0b\x32\'.google.privacy.dlp.v2.MetadataLocationH\x00\x12\x37\n\x13\x63ontainer_timestamp\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x19\n\x11\x63ontainer_version\x18\x07 \x01(\tB\n\n\x08location"\x94\x01\n\x10MetadataLocation\x12\x31\n\x04type\x18\x01 
\x01(\x0e\x32#.google.privacy.dlp.v2.MetadataType\x12\x44\n\rstorage_label\x18\x03 \x01(\x0b\x32+.google.privacy.dlp.v2.StorageMetadataLabelH\x00\x42\x07\n\x05label"#\n\x14StorageMetadataLabel\x12\x0b\n\x03key\x18\x01 \x01(\t"\'\n\x10\x44ocumentLocation\x12\x13\n\x0b\x66ile_offset\x18\x01 \x01(\x03"\xb6\x01\n\x0eRecordLocation\x12\x34\n\nrecord_key\x18\x01 \x01(\x0b\x32 .google.privacy.dlp.v2.RecordKey\x12\x30\n\x08\x66ield_id\x18\x02 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x12<\n\x0etable_location\x18\x03 \x01(\x0b\x32$.google.privacy.dlp.v2.TableLocation""\n\rTableLocation\x12\x11\n\trow_index\x18\x01 \x01(\x03"\xac\x01\n\tContainer\x12\x0c\n\x04type\x18\x01 \x01(\t\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x11\n\tfull_path\x18\x03 \x01(\t\x12\x11\n\troot_path\x18\x04 \x01(\t\x12\x15\n\rrelative_path\x18\x05 \x01(\t\x12/\n\x0bupdate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x0f\n\x07version\x18\x07 \x01(\t"#\n\x05Range\x12\r\n\x05start\x18\x01 \x01(\x03\x12\x0b\n\x03\x65nd\x18\x02 \x01(\x03"K\n\rImageLocation\x12:\n\x0e\x62ounding_boxes\x18\x01 \x03(\x0b\x32".google.privacy.dlp.v2.BoundingBox"G\n\x0b\x42oundingBox\x12\x0b\n\x03top\x18\x01 \x01(\x05\x12\x0c\n\x04left\x18\x02 \x01(\x05\x12\r\n\x05width\x18\x03 \x01(\x05\x12\x0e\n\x06height\x18\x04 \x01(\x05"\x8a\x04\n\x12RedactImageRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x13\n\x0blocation_id\x18\x08 \x01(\t\x12<\n\x0einspect_config\x18\x02 \x01(\x0b\x32$.google.privacy.dlp.v2.InspectConfig\x12_\n\x17image_redaction_configs\x18\x05 \x03(\x0b\x32>.google.privacy.dlp.v2.RedactImageRequest.ImageRedactionConfig\x12\x18\n\x10include_findings\x18\x06 \x01(\x08\x12\x39\n\tbyte_item\x18\x07 \x01(\x0b\x32&.google.privacy.dlp.v2.ByteContentItem\x1a\xa8\x01\n\x14ImageRedactionConfig\x12\x34\n\tinfo_type\x18\x01 \x01(\x0b\x32\x1f.google.privacy.dlp.v2.InfoTypeH\x00\x12\x19\n\x0fredact_all_text\x18\x02 
\x01(\x08H\x00\x12\x35\n\x0fredaction_color\x18\x03 \x01(\x0b\x32\x1c.google.privacy.dlp.v2.ColorB\x08\n\x06target"1\n\x05\x43olor\x12\x0b\n\x03red\x18\x01 \x01(\x02\x12\r\n\x05green\x18\x02 \x01(\x02\x12\x0c\n\x04\x62lue\x18\x03 \x01(\x02"\x83\x01\n\x13RedactImageResponse\x12\x16\n\x0eredacted_image\x18\x01 \x01(\x0c\x12\x16\n\x0e\x65xtracted_text\x18\x02 \x01(\t\x12<\n\x0einspect_result\x18\x03 \x01(\x0b\x32$.google.privacy.dlp.v2.InspectResult"\xe6\x02\n\x18\x44\x65identifyContentRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x42\n\x11\x64\x65identify_config\x18\x02 \x01(\x0b\x32\'.google.privacy.dlp.v2.DeidentifyConfig\x12<\n\x0einspect_config\x18\x03 \x01(\x0b\x32$.google.privacy.dlp.v2.InspectConfig\x12\x30\n\x04item\x18\x04 \x01(\x0b\x32".google.privacy.dlp.v2.ContentItem\x12\x1d\n\x15inspect_template_name\x18\x05 \x01(\t\x12 \n\x18\x64\x65identify_template_name\x18\x06 \x01(\t\x12\x13\n\x0blocation_id\x18\x07 \x01(\t"\x8e\x01\n\x19\x44\x65identifyContentResponse\x12\x30\n\x04item\x18\x01 \x01(\x0b\x32".google.privacy.dlp.v2.ContentItem\x12?\n\x08overview\x18\x02 \x01(\x0b\x32-.google.privacy.dlp.v2.TransformationOverview"\xe9\x02\n\x18ReidentifyContentRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x42\n\x11reidentify_config\x18\x02 \x01(\x0b\x32\'.google.privacy.dlp.v2.DeidentifyConfig\x12<\n\x0einspect_config\x18\x03 \x01(\x0b\x32$.google.privacy.dlp.v2.InspectConfig\x12\x30\n\x04item\x18\x04 \x01(\x0b\x32".google.privacy.dlp.v2.ContentItem\x12\x1d\n\x15inspect_template_name\x18\x05 \x01(\t\x12 \n\x18reidentify_template_name\x18\x06 \x01(\t\x12\x13\n\x0blocation_id\x18\x07 \x01(\t"\x8e\x01\n\x19ReidentifyContentResponse\x12\x30\n\x04item\x18\x01 \x01(\x0b\x32".google.privacy.dlp.v2.ContentItem\x12?\n\x08overview\x18\x02 
\x01(\x0b\x32-.google.privacy.dlp.v2.TransformationOverview"\xfd\x01\n\x15InspectContentRequest\x12@\n\x06parent\x18\x01 \x01(\tB0\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12<\n\x0einspect_config\x18\x02 \x01(\x0b\x32$.google.privacy.dlp.v2.InspectConfig\x12\x30\n\x04item\x18\x03 \x01(\x0b\x32".google.privacy.dlp.v2.ContentItem\x12\x1d\n\x15inspect_template_name\x18\x04 \x01(\t\x12\x13\n\x0blocation_id\x18\x05 \x01(\t"N\n\x16InspectContentResponse\x12\x34\n\x06result\x18\x01 \x01(\x0b\x32$.google.privacy.dlp.v2.InspectResult"\xb7\x02\n\x13OutputStorageConfig\x12\x35\n\x05table\x18\x01 \x01(\x0b\x32$.google.privacy.dlp.v2.BigQueryTableH\x00\x12N\n\routput_schema\x18\x03 \x01(\x0e\x32\x37.google.privacy.dlp.v2.OutputStorageConfig.OutputSchema"\x90\x01\n\x0cOutputSchema\x12\x1d\n\x19OUTPUT_SCHEMA_UNSPECIFIED\x10\x00\x12\x11\n\rBASIC_COLUMNS\x10\x01\x12\x0f\n\x0bGCS_COLUMNS\x10\x02\x12\x15\n\x11\x44\x41TASTORE_COLUMNS\x10\x03\x12\x15\n\x11\x42IG_QUERY_COLUMNS\x10\x04\x12\x0f\n\x0b\x41LL_COLUMNS\x10\x05\x42\x06\n\x04type"R\n\rInfoTypeStats\x12\x32\n\tinfo_type\x18\x01 \x01(\x0b\x32\x1f.google.privacy.dlp.v2.InfoType\x12\r\n\x05\x63ount\x18\x02 \x01(\x03"\xa4\x04\n\x18InspectDataSourceDetails\x12[\n\x11requested_options\x18\x02 \x01(\x0b\x32@.google.privacy.dlp.v2.InspectDataSourceDetails.RequestedOptions\x12\x46\n\x06result\x18\x03 \x01(\x0b\x32\x36.google.privacy.dlp.v2.InspectDataSourceDetails.Result\x1a\x9a\x01\n\x10RequestedOptions\x12I\n\x19snapshot_inspect_template\x18\x01 \x01(\x0b\x32&.google.privacy.dlp.v2.InspectTemplate\x12;\n\njob_config\x18\x03 \x01(\x0b\x32\'.google.privacy.dlp.v2.InspectJobConfig\x1a\xc5\x01\n\x06Result\x12\x17\n\x0fprocessed_bytes\x18\x01 \x01(\x03\x12\x1d\n\x15total_estimated_bytes\x18\x02 \x01(\x03\x12=\n\x0finfo_type_stats\x18\x03 \x03(\x0b\x32$.google.privacy.dlp.v2.InfoTypeStats\x12\x44\n\x0chybrid_stats\x18\x07 
\x01(\x0b\x32..google.privacy.dlp.v2.HybridInspectStatistics"`\n\x17HybridInspectStatistics\x12\x17\n\x0fprocessed_count\x18\x01 \x01(\x03\x12\x15\n\raborted_count\x18\x02 \x01(\x03\x12\x15\n\rpending_count\x18\x03 \x01(\x03"\x90\x01\n\x13InfoTypeDescription\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12@\n\x0csupported_by\x18\x03 \x03(\x0e\x32*.google.privacy.dlp.v2.InfoTypeSupportedBy\x12\x13\n\x0b\x64\x65scription\x18\x04 \x01(\t"b\n\x14ListInfoTypesRequest\x12\x0e\n\x06parent\x18\x04 \x01(\t\x12\x15\n\rlanguage_code\x18\x01 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x02 \x01(\t\x12\x13\n\x0blocation_id\x18\x03 \x01(\t"W\n\x15ListInfoTypesResponse\x12>\n\ninfo_types\x18\x01 \x03(\x0b\x32*.google.privacy.dlp.v2.InfoTypeDescription"\xc1\x01\n\x15RiskAnalysisJobConfig\x12<\n\x0eprivacy_metric\x18\x01 \x01(\x0b\x32$.google.privacy.dlp.v2.PrivacyMetric\x12:\n\x0csource_table\x18\x02 \x01(\x0b\x32$.google.privacy.dlp.v2.BigQueryTable\x12.\n\x07\x61\x63tions\x18\x03 \x03(\x0b\x32\x1d.google.privacy.dlp.v2.Action"\xbc\x01\n\x07QuasiId\x12\x32\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldIdB\x03\xe0\x41\x02\x12\x34\n\tinfo_type\x18\x02 \x01(\x0b\x32\x1f.google.privacy.dlp.v2.InfoTypeH\x00\x12\x14\n\ncustom_tag\x18\x03 \x01(\tH\x00\x12*\n\x08inferred\x18\x04 \x01(\x0b\x32\x16.google.protobuf.EmptyH\x00\x42\x05\n\x03tag"\xbe\x02\n\x10StatisticalTable\x12\x38\n\x05table\x18\x03 \x01(\x0b\x32$.google.privacy.dlp.v2.BigQueryTableB\x03\xe0\x41\x02\x12T\n\tquasi_ids\x18\x01 \x03(\x0b\x32<.google.privacy.dlp.v2.StatisticalTable.QuasiIdentifierFieldB\x03\xe0\x41\x02\x12?\n\x12relative_frequency\x18\x02 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldIdB\x03\xe0\x41\x02\x1aY\n\x14QuasiIdentifierField\x12-\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x12\x12\n\ncustom_tag\x18\x02 \x01(\t"\x93\x0f\n\rPrivacyMetric\x12[\n\x16numerical_stats_config\x18\x01 
\x01(\x0b\x32\x39.google.privacy.dlp.v2.PrivacyMetric.NumericalStatsConfigH\x00\x12_\n\x18\x63\x61tegorical_stats_config\x18\x02 \x01(\x0b\x32;.google.privacy.dlp.v2.PrivacyMetric.CategoricalStatsConfigH\x00\x12S\n\x12k_anonymity_config\x18\x03 \x01(\x0b\x32\x35.google.privacy.dlp.v2.PrivacyMetric.KAnonymityConfigH\x00\x12S\n\x12l_diversity_config\x18\x04 \x01(\x0b\x32\x35.google.privacy.dlp.v2.PrivacyMetric.LDiversityConfigH\x00\x12\\\n\x17k_map_estimation_config\x18\x05 \x01(\x0b\x32\x39.google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfigH\x00\x12n\n delta_presence_estimation_config\x18\x06 \x01(\x0b\x32\x42.google.privacy.dlp.v2.PrivacyMetric.DeltaPresenceEstimationConfigH\x00\x1a\x45\n\x14NumericalStatsConfig\x12-\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x1aG\n\x16\x43\x61tegoricalStatsConfig\x12-\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x1ay\n\x10KAnonymityConfig\x12\x31\n\tquasi_ids\x18\x01 \x03(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x12\x32\n\tentity_id\x18\x02 \x01(\x0b\x32\x1f.google.privacy.dlp.v2.EntityId\x1a\x82\x01\n\x10LDiversityConfig\x12\x31\n\tquasi_ids\x18\x01 \x03(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x12;\n\x13sensitive_attribute\x18\x02 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x1a\x81\x06\n\x14KMapEstimationConfig\x12]\n\tquasi_ids\x18\x01 \x03(\x0b\x32\x45.google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.TaggedFieldB\x03\xe0\x41\x02\x12\x13\n\x0bregion_code\x18\x02 \x01(\t\x12\x62\n\x10\x61uxiliary_tables\x18\x03 \x03(\x0b\x32H.google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable\x1a\xc0\x01\n\x0bTaggedField\x12\x32\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldIdB\x03\xe0\x41\x02\x12\x34\n\tinfo_type\x18\x02 \x01(\x0b\x32\x1f.google.privacy.dlp.v2.InfoTypeH\x00\x12\x14\n\ncustom_tag\x18\x03 \x01(\tH\x00\x12*\n\x08inferred\x18\x04 
\x01(\x0b\x32\x16.google.protobuf.EmptyH\x00\x42\x05\n\x03tag\x1a\xcd\x02\n\x0e\x41uxiliaryTable\x12\x38\n\x05table\x18\x03 \x01(\x0b\x32$.google.privacy.dlp.v2.BigQueryTableB\x03\xe0\x41\x02\x12m\n\tquasi_ids\x18\x01 \x03(\x0b\x32U.google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdFieldB\x03\xe0\x41\x02\x12?\n\x12relative_frequency\x18\x02 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldIdB\x03\xe0\x41\x02\x1aQ\n\x0cQuasiIdField\x12-\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x12\x12\n\ncustom_tag\x18\x02 \x01(\t\x1a\xaf\x01\n\x1d\x44\x65ltaPresenceEstimationConfig\x12\x36\n\tquasi_ids\x18\x01 \x03(\x0b\x32\x1e.google.privacy.dlp.v2.QuasiIdB\x03\xe0\x41\x02\x12\x13\n\x0bregion_code\x18\x02 \x01(\t\x12\x41\n\x10\x61uxiliary_tables\x18\x03 \x03(\x0b\x32\'.google.privacy.dlp.v2.StatisticalTableB\x06\n\x04type"\xf2\x1d\n\x1c\x41nalyzeDataSourceRiskDetails\x12\x46\n\x18requested_privacy_metric\x18\x01 \x01(\x0b\x32$.google.privacy.dlp.v2.PrivacyMetric\x12\x44\n\x16requested_source_table\x18\x02 \x01(\x0b\x32$.google.privacy.dlp.v2.BigQueryTable\x12j\n\x16numerical_stats_result\x18\x03 \x01(\x0b\x32H.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.NumericalStatsResultH\x00\x12n\n\x18\x63\x61tegorical_stats_result\x18\x04 \x01(\x0b\x32J.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResultH\x00\x12\x62\n\x12k_anonymity_result\x18\x05 \x01(\x0b\x32\x44.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResultH\x00\x12\x62\n\x12l_diversity_result\x18\x06 \x01(\x0b\x32\x44.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResultH\x00\x12k\n\x17k_map_estimation_result\x18\x07 \x01(\x0b\x32H.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResultH\x00\x12}\n delta_presence_estimation_result\x18\t \x01(\x0b\x32Q.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResultH\x00\x1a\xaf\x01\n\x14NumericalStatsResult\x12/\n\tmin_value\x18\x01 
\x01(\x0b\x32\x1c.google.privacy.dlp.v2.Value\x12/\n\tmax_value\x18\x02 \x01(\x0b\x32\x1c.google.privacy.dlp.v2.Value\x12\x35\n\x0fquantile_values\x18\x04 \x03(\x0b\x32\x1c.google.privacy.dlp.v2.Value\x1a\x8d\x03\n\x16\x43\x61tegoricalStatsResult\x12\x95\x01\n!value_frequency_histogram_buckets\x18\x05 \x03(\x0b\x32j.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket\x1a\xda\x01\n\x1f\x43\x61tegoricalStatsHistogramBucket\x12#\n\x1bvalue_frequency_lower_bound\x18\x01 \x01(\x03\x12#\n\x1bvalue_frequency_upper_bound\x18\x02 \x01(\x03\x12\x13\n\x0b\x62ucket_size\x18\x03 \x01(\x03\x12<\n\rbucket_values\x18\x04 \x03(\x0b\x32%.google.privacy.dlp.v2.ValueFrequency\x12\x1a\n\x12\x62ucket_value_count\x18\x05 \x01(\x03\x1a\xb5\x04\n\x10KAnonymityResult\x12\x8b\x01\n#equivalence_class_histogram_buckets\x18\x05 \x03(\x0b\x32^.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket\x1at\n\x1aKAnonymityEquivalenceClass\x12\x36\n\x10quasi_ids_values\x18\x01 \x03(\x0b\x32\x1c.google.privacy.dlp.v2.Value\x12\x1e\n\x16\x65quivalence_class_size\x18\x02 \x01(\x03\x1a\x9c\x02\n\x19KAnonymityHistogramBucket\x12*\n"equivalence_class_size_lower_bound\x18\x01 \x01(\x03\x12*\n"equivalence_class_size_upper_bound\x18\x02 \x01(\x03\x12\x13\n\x0b\x62ucket_size\x18\x03 \x01(\x03\x12v\n\rbucket_values\x18\x04 \x03(\x0b\x32_.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass\x12\x1a\n\x12\x62ucket_value_count\x18\x05 \x01(\x03\x1a\xb0\x05\n\x10LDiversityResult\x12\x93\x01\n+sensitive_value_frequency_histogram_buckets\x18\x05 \x03(\x0b\x32^.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket\x1a\xe0\x01\n\x1aLDiversityEquivalenceClass\x12\x36\n\x10quasi_ids_values\x18\x01 \x03(\x0b\x32\x1c.google.privacy.dlp.v2.Value\x12\x1e\n\x16\x65quivalence_class_size\x18\x02 \x01(\x03\x12%\n\x1dnum_distinct_sensitive_values\x18\x03 
\x01(\x03\x12\x43\n\x14top_sensitive_values\x18\x04 \x03(\x0b\x32%.google.privacy.dlp.v2.ValueFrequency\x1a\xa2\x02\n\x19LDiversityHistogramBucket\x12-\n%sensitive_value_frequency_lower_bound\x18\x01 \x01(\x03\x12-\n%sensitive_value_frequency_upper_bound\x18\x02 \x01(\x03\x12\x13\n\x0b\x62ucket_size\x18\x03 \x01(\x03\x12v\n\rbucket_values\x18\x04 \x03(\x0b\x32_.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass\x12\x1a\n\x12\x62ucket_value_count\x18\x05 \x01(\x03\x1a\x95\x04\n\x14KMapEstimationResult\x12\x8a\x01\n\x1ak_map_estimation_histogram\x18\x01 \x03(\x0b\x32\x66.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket\x1ar\n\x1bKMapEstimationQuasiIdValues\x12\x36\n\x10quasi_ids_values\x18\x01 \x03(\x0b\x32\x1c.google.privacy.dlp.v2.Value\x12\x1b\n\x13\x65stimated_anonymity\x18\x02 \x01(\x03\x1a\xfb\x01\n\x1dKMapEstimationHistogramBucket\x12\x15\n\rmin_anonymity\x18\x01 \x01(\x03\x12\x15\n\rmax_anonymity\x18\x02 \x01(\x03\x12\x13\n\x0b\x62ucket_size\x18\x05 \x01(\x03\x12{\n\rbucket_values\x18\x06 \x03(\x0b\x32\x64.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues\x12\x1a\n\x12\x62ucket_value_count\x18\x07 \x01(\x03\x1a\xe4\x04\n\x1d\x44\x65ltaPresenceEstimationResult\x12\xa5\x01\n#delta_presence_estimation_histogram\x18\x01 \x03(\x0b\x32x.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket\x1a}\n$DeltaPresenceEstimationQuasiIdValues\x12\x36\n\x10quasi_ids_values\x18\x01 \x03(\x0b\x32\x1c.google.privacy.dlp.v2.Value\x12\x1d\n\x15\x65stimated_probability\x18\x02 \x01(\x01\x1a\x9b\x02\n&DeltaPresenceEstimationHistogramBucket\x12\x17\n\x0fmin_probability\x18\x01 \x01(\x01\x12\x17\n\x0fmax_probability\x18\x02 \x01(\x01\x12\x13\n\x0b\x62ucket_size\x18\x05 \x01(\x03\x12\x8d\x01\n\rbucket_values\x18\x06 
\x03(\x0b\x32v.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues\x12\x1a\n\x12\x62ucket_value_count\x18\x07 \x01(\x03\x42\x08\n\x06result"L\n\x0eValueFrequency\x12+\n\x05value\x18\x01 \x01(\x0b\x32\x1c.google.privacy.dlp.v2.Value\x12\r\n\x05\x63ount\x18\x02 \x01(\x03"\xb3\x02\n\x05Value\x12\x17\n\rinteger_value\x18\x01 \x01(\x03H\x00\x12\x15\n\x0b\x66loat_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x17\n\rboolean_value\x18\x04 \x01(\x08H\x00\x12\x35\n\x0ftimestamp_value\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x12,\n\ntime_value\x18\x06 \x01(\x0b\x32\x16.google.type.TimeOfDayH\x00\x12\'\n\ndate_value\x18\x07 \x01(\x0b\x32\x11.google.type.DateH\x00\x12\x33\n\x11\x64\x61y_of_week_value\x18\x08 \x01(\x0e\x32\x16.google.type.DayOfWeekH\x00\x42\x06\n\x04type"Q\n\tQuoteInfo\x12\x34\n\tdate_time\x18\x02 \x01(\x0b\x32\x1f.google.privacy.dlp.v2.DateTimeH\x00\x42\x0e\n\x0cparsed_quote"\xdf\x01\n\x08\x44\x61teTime\x12\x1f\n\x04\x64\x61te\x18\x01 \x01(\x0b\x32\x11.google.type.Date\x12+\n\x0b\x64\x61y_of_week\x18\x02 \x01(\x0e\x32\x16.google.type.DayOfWeek\x12$\n\x04time\x18\x03 \x01(\x0b\x32\x16.google.type.TimeOfDay\x12;\n\ttime_zone\x18\x04 \x01(\x0b\x32(.google.privacy.dlp.v2.DateTime.TimeZone\x1a"\n\x08TimeZone\x12\x16\n\x0eoffset_minutes\x18\x01 \x01(\x05"\xa4\x02\n\x10\x44\x65identifyConfig\x12S\n\x19info_type_transformations\x18\x01 \x01(\x0b\x32..google.privacy.dlp.v2.InfoTypeTransformationsH\x00\x12N\n\x16record_transformations\x18\x02 \x01(\x0b\x32,.google.privacy.dlp.v2.RecordTransformationsH\x00\x12Y\n\x1dtransformation_error_handling\x18\x03 \x01(\x0b\x32\x32.google.privacy.dlp.v2.TransformationErrorHandlingB\x10\n\x0etransformation"\x85\x02\n\x1bTransformationErrorHandling\x12T\n\x0bthrow_error\x18\x01 \x01(\x0b\x32=.google.privacy.dlp.v2.TransformationErrorHandling.ThrowErrorH\x00\x12\x64\n\x13leave_untransformed\x18\x02 
\x01(\x0b\x32\x45.google.privacy.dlp.v2.TransformationErrorHandling.LeaveUntransformedH\x00\x1a\x0c\n\nThrowError\x1a\x14\n\x12LeaveUntransformedB\x06\n\x04mode"\xf5\x06\n\x17PrimitiveTransformation\x12\x43\n\x0ereplace_config\x18\x01 \x01(\x0b\x32).google.privacy.dlp.v2.ReplaceValueConfigH\x00\x12<\n\rredact_config\x18\x02 \x01(\x0b\x32#.google.privacy.dlp.v2.RedactConfigH\x00\x12K\n\x15\x63haracter_mask_config\x18\x03 \x01(\x0b\x32*.google.privacy.dlp.v2.CharacterMaskConfigH\x00\x12Y\n\x1d\x63rypto_replace_ffx_fpe_config\x18\x04 \x01(\x0b\x32\x30.google.privacy.dlp.v2.CryptoReplaceFfxFpeConfigH\x00\x12V\n\x1b\x66ixed_size_bucketing_config\x18\x05 \x01(\x0b\x32/.google.privacy.dlp.v2.FixedSizeBucketingConfigH\x00\x12\x42\n\x10\x62ucketing_config\x18\x06 \x01(\x0b\x32&.google.privacy.dlp.v2.BucketingConfigH\x00\x12Y\n\x1dreplace_with_info_type_config\x18\x07 \x01(\x0b\x32\x30.google.privacy.dlp.v2.ReplaceWithInfoTypeConfigH\x00\x12\x41\n\x10time_part_config\x18\x08 \x01(\x0b\x32%.google.privacy.dlp.v2.TimePartConfigH\x00\x12\x45\n\x12\x63rypto_hash_config\x18\t \x01(\x0b\x32\'.google.privacy.dlp.v2.CryptoHashConfigH\x00\x12\x43\n\x11\x64\x61te_shift_config\x18\x0b \x01(\x0b\x32&.google.privacy.dlp.v2.DateShiftConfigH\x00\x12W\n\x1b\x63rypto_deterministic_config\x18\x0c \x01(\x0b\x32\x30.google.privacy.dlp.v2.CryptoDeterministicConfigH\x00\x42\x10\n\x0etransformation"\xdc\x01\n\x0eTimePartConfig\x12G\n\x0fpart_to_extract\x18\x01 \x01(\x0e\x32..google.privacy.dlp.v2.TimePartConfig.TimePart"\x80\x01\n\x08TimePart\x12\x19\n\x15TIME_PART_UNSPECIFIED\x10\x00\x12\x08\n\x04YEAR\x10\x01\x12\t\n\x05MONTH\x10\x02\x12\x10\n\x0c\x44\x41Y_OF_MONTH\x10\x03\x12\x0f\n\x0b\x44\x41Y_OF_WEEK\x10\x04\x12\x10\n\x0cWEEK_OF_YEAR\x10\x05\x12\x0f\n\x0bHOUR_OF_DAY\x10\x06"H\n\x10\x43ryptoHashConfig\x12\x34\n\ncrypto_key\x18\x01 \x01(\x0b\x32 .google.privacy.dlp.v2.CryptoKey"\xc0\x01\n\x19\x43ryptoDeterministicConfig\x12\x34\n\ncrypto_key\x18\x01 \x01(\x0b\x32 
.google.privacy.dlp.v2.CryptoKey\x12<\n\x13surrogate_info_type\x18\x02 \x01(\x0b\x32\x1f.google.privacy.dlp.v2.InfoType\x12/\n\x07\x63ontext\x18\x03 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId"E\n\x12ReplaceValueConfig\x12/\n\tnew_value\x18\x01 \x01(\x0b\x32\x1c.google.privacy.dlp.v2.Value"\x1b\n\x19ReplaceWithInfoTypeConfig"\x0e\n\x0cRedactConfig"\xb6\x02\n\rCharsToIgnore\x12\x1c\n\x12\x63haracters_to_skip\x18\x01 \x01(\tH\x00\x12_\n\x1b\x63ommon_characters_to_ignore\x18\x02 \x01(\x0e\x32\x38.google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnoreH\x00"\x97\x01\n\x13\x43ommonCharsToIgnore\x12&\n"COMMON_CHARS_TO_IGNORE_UNSPECIFIED\x10\x00\x12\x0b\n\x07NUMERIC\x10\x01\x12\x14\n\x10\x41LPHA_UPPER_CASE\x10\x02\x12\x14\n\x10\x41LPHA_LOWER_CASE\x10\x03\x12\x0f\n\x0bPUNCTUATION\x10\x04\x12\x0e\n\nWHITESPACE\x10\x05\x42\x0c\n\ncharacters"\xa3\x01\n\x13\x43haracterMaskConfig\x12\x19\n\x11masking_character\x18\x01 \x01(\t\x12\x16\n\x0enumber_to_mask\x18\x02 \x01(\x05\x12\x15\n\rreverse_order\x18\x03 \x01(\x08\x12\x42\n\x14\x63haracters_to_ignore\x18\x04 \x03(\x0b\x32$.google.privacy.dlp.v2.CharsToIgnore"\xa4\x01\n\x18\x46ixedSizeBucketingConfig\x12\x36\n\x0blower_bound\x18\x01 \x01(\x0b\x32\x1c.google.privacy.dlp.v2.ValueB\x03\xe0\x41\x02\x12\x36\n\x0bupper_bound\x18\x02 \x01(\x0b\x32\x1c.google.privacy.dlp.v2.ValueB\x03\xe0\x41\x02\x12\x18\n\x0b\x62ucket_size\x18\x03 \x01(\x01\x42\x03\xe0\x41\x02"\xeb\x01\n\x0f\x42ucketingConfig\x12>\n\x07\x62uckets\x18\x01 \x03(\x0b\x32-.google.privacy.dlp.v2.BucketingConfig.Bucket\x1a\x97\x01\n\x06\x42ucket\x12)\n\x03min\x18\x01 \x01(\x0b\x32\x1c.google.privacy.dlp.v2.Value\x12)\n\x03max\x18\x02 \x01(\x0b\x32\x1c.google.privacy.dlp.v2.Value\x12\x37\n\x11replacement_value\x18\x03 \x01(\x0b\x32\x1c.google.privacy.dlp.v2.Value"\xf9\x03\n\x19\x43ryptoReplaceFfxFpeConfig\x12\x39\n\ncrypto_key\x18\x01 \x01(\x0b\x32 .google.privacy.dlp.v2.CryptoKeyB\x03\xe0\x41\x02\x12/\n\x07\x63ontext\x18\x02 
\x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x12\x63\n\x0f\x63ommon_alphabet\x18\x04 \x01(\x0e\x32H.google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabetH\x00\x12\x19\n\x0f\x63ustom_alphabet\x18\x05 \x01(\tH\x00\x12\x0f\n\x05radix\x18\x06 \x01(\x05H\x00\x12<\n\x13surrogate_info_type\x18\x08 \x01(\x0b\x32\x1f.google.privacy.dlp.v2.InfoType"\x94\x01\n\x17\x46\x66xCommonNativeAlphabet\x12*\n&FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED\x10\x00\x12\x0b\n\x07NUMERIC\x10\x01\x12\x0f\n\x0bHEXADECIMAL\x10\x02\x12\x1c\n\x18UPPER_CASE_ALPHA_NUMERIC\x10\x03\x12\x11\n\rALPHA_NUMERIC\x10\x04\x42\n\n\x08\x61lphabet"\xd8\x01\n\tCryptoKey\x12>\n\ttransient\x18\x01 \x01(\x0b\x32).google.privacy.dlp.v2.TransientCryptoKeyH\x00\x12>\n\tunwrapped\x18\x02 \x01(\x0b\x32).google.privacy.dlp.v2.UnwrappedCryptoKeyH\x00\x12\x41\n\x0bkms_wrapped\x18\x03 \x01(\x0b\x32*.google.privacy.dlp.v2.KmsWrappedCryptoKeyH\x00\x42\x08\n\x06source"\'\n\x12TransientCryptoKey\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02"&\n\x12UnwrappedCryptoKey\x12\x10\n\x03key\x18\x01 \x01(\x0c\x42\x03\xe0\x41\x02"M\n\x13KmsWrappedCryptoKey\x12\x18\n\x0bwrapped_key\x18\x01 \x01(\x0c\x42\x03\xe0\x41\x02\x12\x1c\n\x0f\x63rypto_key_name\x18\x02 \x01(\tB\x03\xe0\x41\x02"\xc2\x01\n\x0f\x44\x61teShiftConfig\x12\x1d\n\x10upper_bound_days\x18\x01 \x01(\x05\x42\x03\xe0\x41\x02\x12\x1d\n\x10lower_bound_days\x18\x02 \x01(\x05\x42\x03\xe0\x41\x02\x12/\n\x07\x63ontext\x18\x03 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x12\x36\n\ncrypto_key\x18\x04 \x01(\x0b\x32 .google.privacy.dlp.v2.CryptoKeyH\x00\x42\x08\n\x06method"\xa5\x02\n\x17InfoTypeTransformations\x12\x63\n\x0ftransformations\x18\x01 \x03(\x0b\x32\x45.google.privacy.dlp.v2.InfoTypeTransformations.InfoTypeTransformationB\x03\xe0\x41\x02\x1a\xa4\x01\n\x16InfoTypeTransformation\x12\x33\n\ninfo_types\x18\x01 \x03(\x0b\x32\x1f.google.privacy.dlp.v2.InfoType\x12U\n\x18primitive_transformation\x18\x02 
\x01(\x0b\x32..google.privacy.dlp.v2.PrimitiveTransformationB\x03\xe0\x41\x02"\xc0\x02\n\x13\x46ieldTransformation\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32\x1e.google.privacy.dlp.v2.FieldIdB\x03\xe0\x41\x02\x12\x39\n\tcondition\x18\x03 \x01(\x0b\x32&.google.privacy.dlp.v2.RecordCondition\x12R\n\x18primitive_transformation\x18\x04 \x01(\x0b\x32..google.privacy.dlp.v2.PrimitiveTransformationH\x00\x12S\n\x19info_type_transformations\x18\x05 \x01(\x0b\x32..google.privacy.dlp.v2.InfoTypeTransformationsH\x00\x42\x10\n\x0etransformation"\xa9\x01\n\x15RecordTransformations\x12I\n\x15\x66ield_transformations\x18\x01 \x03(\x0b\x32*.google.privacy.dlp.v2.FieldTransformation\x12\x45\n\x13record_suppressions\x18\x02 \x03(\x0b\x32(.google.privacy.dlp.v2.RecordSuppression"N\n\x11RecordSuppression\x12\x39\n\tcondition\x18\x01 \x01(\x0b\x32&.google.privacy.dlp.v2.RecordCondition"\xdc\x04\n\x0fRecordCondition\x12G\n\x0b\x65xpressions\x18\x03 \x01(\x0b\x32\x32.google.privacy.dlp.v2.RecordCondition.Expressions\x1a\xae\x01\n\tCondition\x12\x32\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldIdB\x03\xe0\x41\x02\x12@\n\x08operator\x18\x03 \x01(\x0e\x32).google.privacy.dlp.v2.RelationalOperatorB\x03\xe0\x41\x02\x12+\n\x05value\x18\x04 \x01(\x0b\x32\x1c.google.privacy.dlp.v2.Value\x1aR\n\nConditions\x12\x44\n\nconditions\x18\x01 \x03(\x0b\x32\x30.google.privacy.dlp.v2.RecordCondition.Condition\x1a\xfa\x01\n\x0b\x45xpressions\x12\\\n\x10logical_operator\x18\x01 \x01(\x0e\x32\x42.google.privacy.dlp.v2.RecordCondition.Expressions.LogicalOperator\x12G\n\nconditions\x18\x03 \x01(\x0b\x32\x31.google.privacy.dlp.v2.RecordCondition.ConditionsH\x00"<\n\x0fLogicalOperator\x12 \n\x1cLOGICAL_OPERATOR_UNSPECIFIED\x10\x00\x12\x07\n\x03\x41ND\x10\x01\x42\x06\n\x04type"\x83\x01\n\x16TransformationOverview\x12\x19\n\x11transformed_bytes\x18\x02 \x01(\x03\x12N\n\x18transformation_summaries\x18\x03 
\x03(\x0b\x32,.google.privacy.dlp.v2.TransformationSummary"\x9f\x05\n\x15TransformationSummary\x12\x32\n\tinfo_type\x18\x01 \x01(\x0b\x32\x1f.google.privacy.dlp.v2.InfoType\x12-\n\x05\x66ield\x18\x02 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x12\x46\n\x0etransformation\x18\x03 \x01(\x0b\x32..google.privacy.dlp.v2.PrimitiveTransformation\x12I\n\x15\x66ield_transformations\x18\x05 \x03(\x0b\x32*.google.privacy.dlp.v2.FieldTransformation\x12\x41\n\x0frecord_suppress\x18\x06 \x01(\x0b\x32(.google.privacy.dlp.v2.RecordSuppression\x12K\n\x07results\x18\x04 \x03(\x0b\x32:.google.privacy.dlp.v2.TransformationSummary.SummaryResult\x12\x19\n\x11transformed_bytes\x18\x07 \x01(\x03\x1a\x84\x01\n\rSummaryResult\x12\r\n\x05\x63ount\x18\x01 \x01(\x03\x12S\n\x04\x63ode\x18\x02 \x01(\x0e\x32\x45.google.privacy.dlp.v2.TransformationSummary.TransformationResultCode\x12\x0f\n\x07\x64\x65tails\x18\x03 \x01(\t"^\n\x18TransformationResultCode\x12*\n&TRANSFORMATION_RESULT_CODE_UNSPECIFIED\x10\x00\x12\x0b\n\x07SUCCESS\x10\x01\x12\t\n\x05\x45RROR\x10\x02"U\n\x08Schedule\x12?\n\x1arecurrence_period_duration\x18\x01 \x01(\x0b\x32\x19.google.protobuf.DurationH\x00\x42\x08\n\x06option"\x08\n\x06Manual"\xc2\x04\n\x0fInspectTemplate\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x34\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12<\n\x0einspect_config\x18\x06 
\x01(\x0b\x32$.google.privacy.dlp.v2.InspectConfig:\xc6\x02\xea\x41\xc2\x02\n"dlp.googleapis.com/InspectTemplate\x12@organizations/{organization}/inspectTemplates/{inspect_template}\x12\x36projects/{project}/inspectTemplates/{inspect_template}\x12Uorganizations/{organization}/locations/{location}/inspectTemplates/{inspect_template}\x12Kprojects/{project}/locations/{location}/inspectTemplates/{inspect_template}"\xe6\x04\n\x12\x44\x65identifyTemplate\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x03\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x03 \x01(\t\x12\x34\n\x0b\x63reate_time\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x42\n\x11\x64\x65identify_config\x18\x06 \x01(\x0b\x32\'.google.privacy.dlp.v2.DeidentifyConfig:\xe1\x02\xea\x41\xdd\x02\n%dlp.googleapis.com/DeidentifyTemplate\x12\x46organizations/{organization}/deidentifyTemplates/{deidentify_template}\x12\n\x0binspect_job\x18\x04 \x01(\x0b\x32\'.google.privacy.dlp.v2.InspectJobConfigH\x00\x12;\n\x08triggers\x18\x05 \x03(\x0b\x32).google.privacy.dlp.v2.JobTrigger.Trigger\x12\x31\n\x06\x65rrors\x18\x06 \x03(\x0b\x32\x1c.google.privacy.dlp.v2.ErrorB\x03\xe0\x41\x03\x12\x34\n\x0b\x63reate_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x34\n\x0bupdate_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12\x36\n\rlast_run_time\x18\t \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x03\xe0\x41\x03\x12=\n\x06status\x18\n \x01(\x0e\x32(.google.privacy.dlp.v2.JobTrigger.StatusB\x03\xe0\x41\x02\x1az\n\x07Trigger\x12\x33\n\x08schedule\x18\x01 \x01(\x0b\x32\x1f.google.privacy.dlp.v2.ScheduleH\x00\x12/\n\x06manual\x18\x02 
\x01(\x0b\x32\x1d.google.privacy.dlp.v2.ManualH\x00\x42\t\n\x07trigger"H\n\x06Status\x12\x16\n\x12STATUS_UNSPECIFIED\x10\x00\x12\x0b\n\x07HEALTHY\x10\x01\x12\n\n\x06PAUSED\x10\x02\x12\r\n\tCANCELLED\x10\x03:\x94\x01\xea\x41\x90\x01\n\x1d\x64lp.googleapis.com/JobTrigger\x12,projects/{project}/jobTriggers/{job_trigger}\x12\x41projects/{project}/locations/{location}/jobTriggers/{job_trigger}B\x05\n\x03job"\xf4\x05\n\x06\x41\x63tion\x12\x43\n\rsave_findings\x18\x01 \x01(\x0b\x32*.google.privacy.dlp.v2.Action.SaveFindingsH\x00\x12@\n\x07pub_sub\x18\x02 \x01(\x0b\x32-.google.privacy.dlp.v2.Action.PublishToPubSubH\x00\x12U\n\x17publish_summary_to_cscc\x18\x03 \x01(\x0b\x32\x32.google.privacy.dlp.v2.Action.PublishSummaryToCsccH\x00\x12q\n&publish_findings_to_cloud_data_catalog\x18\x05 \x01(\x0b\x32?.google.privacy.dlp.v2.Action.PublishFindingsToCloudDataCatalogH\x00\x12V\n\x17job_notification_emails\x18\x08 \x01(\x0b\x32\x33.google.privacy.dlp.v2.Action.JobNotificationEmailsH\x00\x12T\n\x16publish_to_stackdriver\x18\t \x01(\x0b\x32\x32.google.privacy.dlp.v2.Action.PublishToStackdriverH\x00\x1aQ\n\x0cSaveFindings\x12\x41\n\routput_config\x18\x01 \x01(\x0b\x32*.google.privacy.dlp.v2.OutputStorageConfig\x1a \n\x0fPublishToPubSub\x12\r\n\x05topic\x18\x01 \x01(\t\x1a\x16\n\x14PublishSummaryToCscc\x1a#\n!PublishFindingsToCloudDataCatalog\x1a\x17\n\x15JobNotificationEmails\x1a\x16\n\x14PublishToStackdriverB\x08\n\x06\x61\x63tion"\xcb\x01\n\x1c\x43reateInspectTemplateRequest\x12:\n\x06parent\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\x12"dlp.googleapis.com/InspectTemplate\x12\x45\n\x10inspect_template\x18\x02 \x01(\x0b\x32&.google.privacy.dlp.v2.InspectTemplateB\x03\xe0\x41\x02\x12\x13\n\x0btemplate_id\x18\x03 \x01(\t\x12\x13\n\x0blocation_id\x18\x04 \x01(\t"\xcb\x01\n\x1cUpdateInspectTemplateRequest\x12\x38\n\x04name\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"dlp.googleapis.com/InspectTemplate\x12@\n\x10inspect_template\x18\x02 
\x01(\x0b\x32&.google.privacy.dlp.v2.InspectTemplate\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"U\n\x19GetInspectTemplateRequest\x12\x38\n\x04name\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"dlp.googleapis.com/InspectTemplate"\xa7\x01\n\x1bListInspectTemplatesRequest\x12:\n\x06parent\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\x12"dlp.googleapis.com/InspectTemplate\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x10\n\x08order_by\x18\x04 \x01(\t\x12\x13\n\x0blocation_id\x18\x05 \x01(\t"z\n\x1cListInspectTemplatesResponse\x12\x41\n\x11inspect_templates\x18\x01 \x03(\x0b\x32&.google.privacy.dlp.v2.InspectTemplate\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"X\n\x1c\x44\x65leteInspectTemplateRequest\x12\x38\n\x04name\x18\x01 \x01(\tB*\xe0\x41\x02\xfa\x41$\n"dlp.googleapis.com/InspectTemplate"\xc4\x01\n\x17\x43reateJobTriggerRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12;\n\x0bjob_trigger\x18\x02 \x01(\x0b\x32!.google.privacy.dlp.v2.JobTriggerB\x03\xe0\x41\x02\x12\x12\n\ntrigger_id\x18\x03 \x01(\t\x12\x13\n\x0blocation_id\x18\x04 \x01(\t"P\n\x19\x41\x63tivateJobTriggerRequest\x12\x33\n\x04name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1d\x64lp.googleapis.com/JobTrigger"\xb7\x01\n\x17UpdateJobTriggerRequest\x12\x33\n\x04name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1d\x64lp.googleapis.com/JobTrigger\x12\x36\n\x0bjob_trigger\x18\x02 \x01(\x0b\x32!.google.privacy.dlp.v2.JobTrigger\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"K\n\x14GetJobTriggerRequest\x12\x33\n\x04name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1d\x64lp.googleapis.com/JobTrigger"\x88\x02\n\x13\x43reateDlpJobRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12>\n\x0binspect_job\x18\x02 \x01(\x0b\x32\'.google.privacy.dlp.v2.InspectJobConfigH\x00\x12@\n\x08risk_job\x18\x03 
\x01(\x0b\x32,.google.privacy.dlp.v2.RiskAnalysisJobConfigH\x00\x12\x0e\n\x06job_id\x18\x04 \x01(\t\x12\x13\n\x0blocation_id\x18\x05 \x01(\tB\x05\n\x03job"\xbb\x01\n\x16ListJobTriggersRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x10\n\x08order_by\x18\x04 \x01(\t\x12\x0e\n\x06\x66ilter\x18\x05 \x01(\t\x12\x13\n\x0blocation_id\x18\x07 \x01(\t"k\n\x17ListJobTriggersResponse\x12\x37\n\x0cjob_triggers\x18\x01 \x03(\x0b\x32!.google.privacy.dlp.v2.JobTrigger\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"N\n\x17\x44\x65leteJobTriggerRequest\x12\x33\n\x04name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1d\x64lp.googleapis.com/JobTrigger"\xdd\x01\n\x10InspectJobConfig\x12<\n\x0estorage_config\x18\x01 \x01(\x0b\x32$.google.privacy.dlp.v2.StorageConfig\x12<\n\x0einspect_config\x18\x02 \x01(\x0b\x32$.google.privacy.dlp.v2.InspectConfig\x12\x1d\n\x15inspect_template_name\x18\x03 \x01(\t\x12.\n\x07\x61\x63tions\x18\x04 \x03(\x0b\x32\x1d.google.privacy.dlp.v2.Action"\xeb\x05\n\x06\x44lpJob\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x04type\x18\x02 \x01(\x0e\x32!.google.privacy.dlp.v2.DlpJobType\x12\x35\n\x05state\x18\x03 \x01(\x0e\x32&.google.privacy.dlp.v2.DlpJob.JobState\x12K\n\x0crisk_details\x18\x04 \x01(\x0b\x32\x33.google.privacy.dlp.v2.AnalyzeDataSourceRiskDetailsH\x00\x12J\n\x0finspect_details\x18\x05 \x01(\x0b\x32/.google.privacy.dlp.v2.InspectDataSourceDetailsH\x00\x12/\n\x0b\x63reate_time\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12.\n\nstart_time\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x08 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x18\n\x10job_trigger_name\x18\n \x01(\t\x12,\n\x06\x65rrors\x18\x0b 
\x03(\x0b\x32\x1c.google.privacy.dlp.v2.Error"o\n\x08JobState\x12\x19\n\x15JOB_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\x0b\n\x07RUNNING\x10\x02\x12\x08\n\x04\x44ONE\x10\x03\x12\x0c\n\x08\x43\x41NCELED\x10\x04\x12\n\n\x06\x46\x41ILED\x10\x05\x12\n\n\x06\x41\x43TIVE\x10\x06:\x7f\xea\x41|\n\x19\x64lp.googleapis.com/DlpJob\x12$projects/{project}/dlpJobs/{dlp_job}\x12\x39projects/{project}/locations/{location}/dlpJobs/{dlp_job}B\t\n\x07\x64\x65tails"C\n\x10GetDlpJobRequest\x12/\n\x04name\x18\x01 \x01(\tB!\xe0\x41\x02\xfa\x41\x1b\n\x19\x64lp.googleapis.com/DlpJob"\xe8\x01\n\x12ListDlpJobsRequest\x12\x43\n\x06parent\x18\x04 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x0e\n\x06\x66ilter\x18\x01 \x01(\t\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t\x12/\n\x04type\x18\x05 \x01(\x0e\x32!.google.privacy.dlp.v2.DlpJobType\x12\x10\n\x08order_by\x18\x06 \x01(\t\x12\x13\n\x0blocation_id\x18\x07 \x01(\t"[\n\x13ListDlpJobsResponse\x12+\n\x04jobs\x18\x01 \x03(\x0b\x32\x1d.google.privacy.dlp.v2.DlpJob\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"F\n\x13\x43\x61ncelDlpJobRequest\x12/\n\x04name\x18\x01 \x01(\tB!\xe0\x41\x02\xfa\x41\x1b\n\x19\x64lp.googleapis.com/DlpJob"F\n\x13\x46inishDlpJobRequest\x12/\n\x04name\x18\x01 \x01(\tB!\xe0\x41\x02\xfa\x41\x1b\n\x19\x64lp.googleapis.com/DlpJob"F\n\x13\x44\x65leteDlpJobRequest\x12/\n\x04name\x18\x01 \x01(\tB!\xe0\x41\x02\xfa\x41\x1b\n\x19\x64lp.googleapis.com/DlpJob"\xd7\x01\n\x1f\x43reateDeidentifyTemplateRequest\x12=\n\x06parent\x18\x01 \x01(\tB-\xe0\x41\x02\xfa\x41\'\x12%dlp.googleapis.com/DeidentifyTemplate\x12K\n\x13\x64\x65identify_template\x18\x02 \x01(\x0b\x32).google.privacy.dlp.v2.DeidentifyTemplateB\x03\xe0\x41\x02\x12\x13\n\x0btemplate_id\x18\x03 \x01(\t\x12\x13\n\x0blocation_id\x18\x04 \x01(\t"\xd7\x01\n\x1fUpdateDeidentifyTemplateRequest\x12;\n\x04name\x18\x01 
\x01(\tB-\xe0\x41\x02\xfa\x41\'\n%dlp.googleapis.com/DeidentifyTemplate\x12\x46\n\x13\x64\x65identify_template\x18\x02 \x01(\x0b\x32).google.privacy.dlp.v2.DeidentifyTemplate\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"[\n\x1cGetDeidentifyTemplateRequest\x12;\n\x04name\x18\x01 \x01(\tB-\xe0\x41\x02\xfa\x41\'\n%dlp.googleapis.com/DeidentifyTemplate"\xad\x01\n\x1eListDeidentifyTemplatesRequest\x12=\n\x06parent\x18\x01 \x01(\tB-\xe0\x41\x02\xfa\x41\'\x12%dlp.googleapis.com/DeidentifyTemplate\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x10\n\x08order_by\x18\x04 \x01(\t\x12\x13\n\x0blocation_id\x18\x05 \x01(\t"\x83\x01\n\x1fListDeidentifyTemplatesResponse\x12G\n\x14\x64\x65identify_templates\x18\x01 \x03(\x0b\x32).google.privacy.dlp.v2.DeidentifyTemplate\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"^\n\x1f\x44\x65leteDeidentifyTemplateRequest\x12;\n\x04name\x18\x01 \x01(\tB-\xe0\x41\x02\xfa\x41\'\n%dlp.googleapis.com/DeidentifyTemplate"\xf4\x01\n\x1bLargeCustomDictionaryConfig\x12<\n\x0boutput_path\x18\x01 \x01(\x0b\x32\'.google.privacy.dlp.v2.CloudStoragePath\x12L\n\x16\x63loud_storage_file_set\x18\x02 \x01(\x0b\x32*.google.privacy.dlp.v2.CloudStorageFileSetH\x00\x12?\n\x0f\x62ig_query_field\x18\x03 \x01(\x0b\x32$.google.privacy.dlp.v2.BigQueryFieldH\x00\x42\x08\n\x06source"8\n\x1aLargeCustomDictionaryStats\x12\x1a\n\x12\x61pprox_num_phrases\x18\x01 \x01(\x03"\xa6\x02\n\x14StoredInfoTypeConfig\x12\x14\n\x0c\x64isplay_name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12U\n\x17large_custom_dictionary\x18\x03 \x01(\x0b\x32\x32.google.privacy.dlp.v2.LargeCustomDictionaryConfigH\x00\x12\x46\n\ndictionary\x18\x04 \x01(\x0b\x32\x30.google.privacy.dlp.v2.CustomInfoType.DictionaryH\x00\x12<\n\x05regex\x18\x05 \x01(\x0b\x32+.google.privacy.dlp.v2.CustomInfoType.RegexH\x00\x42\x06\n\x04type"s\n\x13StoredInfoTypeStats\x12T\n\x17large_custom_dictionary\x18\x01 
\x01(\x0b\x32\x31.google.privacy.dlp.v2.LargeCustomDictionaryStatsH\x00\x42\x06\n\x04type"\xa9\x02\n\x15StoredInfoTypeVersion\x12;\n\x06\x63onfig\x18\x01 \x01(\x0b\x32+.google.privacy.dlp.v2.StoredInfoTypeConfig\x12/\n\x0b\x63reate_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x39\n\x05state\x18\x03 \x01(\x0e\x32*.google.privacy.dlp.v2.StoredInfoTypeState\x12,\n\x06\x65rrors\x18\x04 \x03(\x0b\x32\x1c.google.privacy.dlp.v2.Error\x12\x39\n\x05stats\x18\x05 \x01(\x0b\x32*.google.privacy.dlp.v2.StoredInfoTypeStats"\xf1\x03\n\x0eStoredInfoType\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x45\n\x0f\x63urrent_version\x18\x02 \x01(\x0b\x32,.google.privacy.dlp.v2.StoredInfoTypeVersion\x12\x46\n\x10pending_versions\x18\x03 \x03(\x0b\x32,.google.privacy.dlp.v2.StoredInfoTypeVersion:\xc1\x02\xea\x41\xbd\x02\n!dlp.googleapis.com/StoredInfoType\x12?organizations/{organization}/storedInfoTypes/{stored_info_type}\x12\x35projects/{project}/storedInfoTypes/{stored_info_type}\x12Torganizations/{organization}/locations/{location}/storedInfoTypes/{stored_info_type}\x12Jprojects/{project}/locations/{location}/storedInfoTypes/{stored_info_type}"\xcc\x01\n\x1b\x43reateStoredInfoTypeRequest\x12\x39\n\x06parent\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\x12!dlp.googleapis.com/StoredInfoType\x12@\n\x06\x63onfig\x18\x02 \x01(\x0b\x32+.google.privacy.dlp.v2.StoredInfoTypeConfigB\x03\xe0\x41\x02\x12\x1b\n\x13stored_info_type_id\x18\x03 \x01(\t\x12\x13\n\x0blocation_id\x18\x04 \x01(\t"\xc4\x01\n\x1bUpdateStoredInfoTypeRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!dlp.googleapis.com/StoredInfoType\x12;\n\x06\x63onfig\x18\x02 \x01(\x0b\x32+.google.privacy.dlp.v2.StoredInfoTypeConfig\x12/\n\x0bupdate_mask\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.FieldMask"S\n\x18GetStoredInfoTypeRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!dlp.googleapis.com/StoredInfoType"\xa5\x01\n\x1aListStoredInfoTypesRequest\x12\x39\n\x06parent\x18\x01 
\x01(\tB)\xe0\x41\x02\xfa\x41#\x12!dlp.googleapis.com/StoredInfoType\x12\x12\n\npage_token\x18\x02 \x01(\t\x12\x11\n\tpage_size\x18\x03 \x01(\x05\x12\x10\n\x08order_by\x18\x04 \x01(\t\x12\x13\n\x0blocation_id\x18\x05 \x01(\t"x\n\x1bListStoredInfoTypesResponse\x12@\n\x11stored_info_types\x18\x01 \x03(\x0b\x32%.google.privacy.dlp.v2.StoredInfoType\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"V\n\x1b\x44\x65leteStoredInfoTypeRequest\x12\x37\n\x04name\x18\x01 \x01(\tB)\xe0\x41\x02\xfa\x41#\n!dlp.googleapis.com/StoredInfoType"\x94\x01\n\x1eHybridInspectJobTriggerRequest\x12\x33\n\x04name\x18\x01 \x01(\tB%\xe0\x41\x02\xfa\x41\x1f\n\x1d\x64lp.googleapis.com/JobTrigger\x12=\n\x0bhybrid_item\x18\x03 \x01(\x0b\x32(.google.privacy.dlp.v2.HybridContentItem"\x8c\x01\n\x1aHybridInspectDlpJobRequest\x12/\n\x04name\x18\x01 \x01(\tB!\xe0\x41\x02\xfa\x41\x1b\n\x19\x64lp.googleapis.com/DlpJob\x12=\n\x0bhybrid_item\x18\x03 \x01(\x0b\x32(.google.privacy.dlp.v2.HybridContentItem"\x8b\x01\n\x11HybridContentItem\x12\x30\n\x04item\x18\x01 \x01(\x0b\x32".google.privacy.dlp.v2.ContentItem\x12\x44\n\x0f\x66inding_details\x18\x02 \x01(\x0b\x32+.google.privacy.dlp.v2.HybridFindingDetails"\xb0\x02\n\x14HybridFindingDetails\x12;\n\x11\x63ontainer_details\x18\x01 \x01(\x0b\x32 .google.privacy.dlp.v2.Container\x12\x13\n\x0b\x66ile_offset\x18\x02 \x01(\x03\x12\x12\n\nrow_offset\x18\x03 \x01(\x03\x12:\n\rtable_options\x18\x04 \x01(\x0b\x32#.google.privacy.dlp.v2.TableOptions\x12G\n\x06labels\x18\x05 \x03(\x0b\x32\x37.google.privacy.dlp.v2.HybridFindingDetails.LabelsEntry\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 
\x01(\t:\x02\x38\x01"\x17\n\x15HybridInspectResponse*\xbb\x01\n\x12RelationalOperator\x12#\n\x1fRELATIONAL_OPERATOR_UNSPECIFIED\x10\x00\x12\x0c\n\x08\x45QUAL_TO\x10\x01\x12\x10\n\x0cNOT_EQUAL_TO\x10\x02\x12\x10\n\x0cGREATER_THAN\x10\x03\x12\r\n\tLESS_THAN\x10\x04\x12\x1a\n\x16GREATER_THAN_OR_EQUALS\x10\x05\x12\x17\n\x13LESS_THAN_OR_EQUALS\x10\x06\x12\n\n\x06\x45XISTS\x10\x07*\x8d\x01\n\x0cMatchingType\x12\x1d\n\x19MATCHING_TYPE_UNSPECIFIED\x10\x00\x12\x1c\n\x18MATCHING_TYPE_FULL_MATCH\x10\x01\x12\x1f\n\x1bMATCHING_TYPE_PARTIAL_MATCH\x10\x02\x12\x1f\n\x1bMATCHING_TYPE_INVERSE_MATCH\x10\x03*M\n\rContentOption\x12\x17\n\x13\x43ONTENT_UNSPECIFIED\x10\x00\x12\x10\n\x0c\x43ONTENT_TEXT\x10\x01\x12\x11\n\rCONTENT_IMAGE\x10\x02*B\n\x0cMetadataType\x12\x1c\n\x18METADATATYPE_UNSPECIFIED\x10\x00\x12\x14\n\x10STORAGE_METADATA\x10\x02*P\n\x13InfoTypeSupportedBy\x12\x19\n\x15\x45NUM_TYPE_UNSPECIFIED\x10\x00\x12\x0b\n\x07INSPECT\x10\x01\x12\x11\n\rRISK_ANALYSIS\x10\x02*R\n\nDlpJobType\x12\x1c\n\x18\x44LP_JOB_TYPE_UNSPECIFIED\x10\x00\x12\x0f\n\x0bINSPECT_JOB\x10\x01\x12\x15\n\x11RISK_ANALYSIS_JOB\x10\x02*n\n\x13StoredInfoTypeState\x12&\n"STORED_INFO_TYPE_STATE_UNSPECIFIED\x10\x00\x12\x0b\n\x07PENDING\x10\x01\x12\t\n\x05READY\x10\x02\x12\n\n\x06\x46\x41ILED\x10\x03\x12\x0b\n\x07INVALID\x10\x04\x32\xc3I\n\nDlpService\x12\xdb\x01\n\x0eInspectContent\x12,.google.privacy.dlp.v2.InspectContentRequest\x1a-.google.privacy.dlp.v2.InspectContentResponse"l\x82\xd3\xe4\x93\x02\x66"\'/v2/{parent=projects/*}/content:inspect:\x01*Z8"3/v2/{parent=projects/*/locations/*}/content:inspect:\x01*\x12\xcc\x01\n\x0bRedactImage\x12).google.privacy.dlp.v2.RedactImageRequest\x1a*.google.privacy.dlp.v2.RedactImageResponse"f\x82\xd3\xe4\x93\x02`"$/v2/{parent=projects/*}/image:redact:\x01*Z5"0/v2/{parent=projects/*/locations/*}/image:redact:\x01*\x12\xea\x01\n\x11\x44\x65identifyContent\x12/.google.privacy.dlp.v2.DeidentifyContentRequest\x1a\x30.google.privacy.dlp.v2.DeidentifyContentResponse"r\x82\xd3\xe4\x93\
x02l"*/v2/{parent=projects/*}/content:deidentify:\x01*Z;"6/v2/{parent=projects/*/locations/*}/content:deidentify:\x01*\x12\xea\x01\n\x11ReidentifyContent\x12/.google.privacy.dlp.v2.ReidentifyContentRequest\x1a\x30.google.privacy.dlp.v2.ReidentifyContentResponse"r\x82\xd3\xe4\x93\x02l"*/v2/{parent=projects/*}/content:reidentify:\x01*Z;"6/v2/{parent=projects/*/locations/*}/content:reidentify:\x01*\x12\xb0\x01\n\rListInfoTypes\x12+.google.privacy.dlp.v2.ListInfoTypesRequest\x1a,.google.privacy.dlp.v2.ListInfoTypesResponse"D\x82\xd3\xe4\x93\x02\x35\x12\r/v2/infoTypesZ$\x12"/v2/{parent=locations/*}/infoTypes\xda\x41\x06parent\x12\xf4\x02\n\x15\x43reateInspectTemplate\x12\x33.google.privacy.dlp.v2.CreateInspectTemplateRequest\x1a&.google.privacy.dlp.v2.InspectTemplate"\xfd\x01\x82\xd3\xe4\x93\x02\xdc\x01"-/v2/{parent=organizations/*}/inspectTemplates:\x01*Z>"9/v2/{parent=organizations/*/locations/*}/inspectTemplates:\x01*Z-"(/v2/{parent=projects/*}/inspectTemplates:\x01*Z9"4/v2/{parent=projects/*/locations/*}/inspectTemplates:\x01*\xda\x41\x17parent,inspect_template\x12\xfe\x02\n\x15UpdateInspectTemplate\x12\x33.google.privacy.dlp.v2.UpdateInspectTemplateRequest\x1a&.google.privacy.dlp.v2.InspectTemplate"\x87\x02\x82\xd3\xe4\x93\x02\xdc\x01\x32-/v2/{name=organizations/*/inspectTemplates/*}:\x01*Z>29/v2/{name=organizations/*/locations/*/inspectTemplates/*}:\x01*Z-2(/v2/{name=projects/*/inspectTemplates/*}:\x01*Z924/v2/{name=projects/*/locations/*/inspectTemplates/*}:\x01*\xda\x41!name,inspect_template,update_mask\x12\xcf\x02\n\x12GetInspectTemplate\x12\x30.google.privacy.dlp.v2.GetInspectTemplateRequest\x1a&.google.privacy.dlp.v2.InspectTemplate"\xde\x01\x82\xd3\xe4\x93\x02\xd0\x01\x12-/v2/{name=organizations/*/inspectTemplates/*}Z;\x12\x39/v2/{name=organizations/*/locations/*/inspectTemplates/*}Z*\x12(/v2/{name=projects/*/inspectTemplates/*}Z6\x12\x34/v2/{name=projects/*/locations/*/inspectTemplates/*}\xda\x41\x04name\x12\xe2\x02\n\x14ListInspectTemplates\x12\x32.google.p
rivacy.dlp.v2.ListInspectTemplatesRequest\x1a\x33.google.privacy.dlp.v2.ListInspectTemplatesResponse"\xe0\x01\x82\xd3\xe4\x93\x02\xd0\x01\x12-/v2/{parent=organizations/*}/inspectTemplatesZ;\x12\x39/v2/{parent=organizations/*/locations/*}/inspectTemplatesZ*\x12(/v2/{parent=projects/*}/inspectTemplatesZ6\x12\x34/v2/{parent=projects/*/locations/*}/inspectTemplates\xda\x41\x06parent\x12\xc5\x02\n\x15\x44\x65leteInspectTemplate\x12\x33.google.privacy.dlp.v2.DeleteInspectTemplateRequest\x1a\x16.google.protobuf.Empty"\xde\x01\x82\xd3\xe4\x93\x02\xd0\x01*-/v2/{name=organizations/*/inspectTemplates/*}Z;*9/v2/{name=organizations/*/locations/*/inspectTemplates/*}Z**(/v2/{name=projects/*/inspectTemplates/*}Z6*4/v2/{name=projects/*/locations/*/inspectTemplates/*}\xda\x41\x04name\x12\x8c\x03\n\x18\x43reateDeidentifyTemplate\x12\x36.google.privacy.dlp.v2.CreateDeidentifyTemplateRequest\x1a).google.privacy.dlp.v2.DeidentifyTemplate"\x8c\x02\x82\xd3\xe4\x93\x02\xe8\x01"0/v2/{parent=organizations/*}/deidentifyTemplates:\x01*ZA"\x12\x12*"9/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect:\x01*\xda\x41\x04name\x12\x91\x01\n\x0c\x46inishDlpJob\x12*.google.privacy.dlp.v2.FinishDlpJobRequest\x1a\x16.google.protobuf.Empty"=\x82\xd3\xe4\x93\x02\x37"2/v2/{name=projects/*/locations/*/dlpJobs/*}:finish:\x01*\x1a\x46\xca\x41\x12\x64lp.googleapis.com\xd2\x41.https://www.googleapis.com/auth/cloud-platformB\x85\x02\n\x19\x63om.google.privacy.dlp.v2B\x08\x44lpProtoP\x01Z8google.golang.org/genproto/googleapis/privacy/dlp/v2;dlp\xaa\x02\x13Google.Cloud.Dlp.V2\xca\x02\x13Google\\Cloud\\Dlp\\V2\xea\x02\x16Google::Cloud::Dlp::V2\xea\x41\\\n\'dlp.googleapis.com/OrganizationLocation\x12\x31organizations/{organization}/locations/{location}b\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - 
google_dot_cloud_dot_dlp__v2_dot_proto_dot_storage__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - google_dot_rpc_dot_status__pb2.DESCRIPTOR, - google_dot_type_dot_date__pb2.DESCRIPTOR, - google_dot_type_dot_dayofweek__pb2.DESCRIPTOR, - google_dot_type_dot_timeofday__pb2.DESCRIPTOR, - ], -) - -_RELATIONALOPERATOR = _descriptor.EnumDescriptor( - name="RelationalOperator", - full_name="google.privacy.dlp.v2.RelationalOperator", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="RELATIONAL_OPERATOR_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="EQUAL_TO", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="NOT_EQUAL_TO", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="GREATER_THAN", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="LESS_THAN", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="GREATER_THAN_OR_EQUALS", - index=5, - number=5, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="LESS_THAN_OR_EQUALS", - index=6, - number=6, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="EXISTS", index=7, number=7, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=32351, - serialized_end=32538, -) -_sym_db.RegisterEnumDescriptor(_RELATIONALOPERATOR) - -RelationalOperator = enum_type_wrapper.EnumTypeWrapper(_RELATIONALOPERATOR) -_MATCHINGTYPE = _descriptor.EnumDescriptor( - name="MatchingType", - 
full_name="google.privacy.dlp.v2.MatchingType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="MATCHING_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="MATCHING_TYPE_FULL_MATCH", - index=1, - number=1, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="MATCHING_TYPE_PARTIAL_MATCH", - index=2, - number=2, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="MATCHING_TYPE_INVERSE_MATCH", - index=3, - number=3, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=32541, - serialized_end=32682, -) -_sym_db.RegisterEnumDescriptor(_MATCHINGTYPE) - -MatchingType = enum_type_wrapper.EnumTypeWrapper(_MATCHINGTYPE) -_CONTENTOPTION = _descriptor.EnumDescriptor( - name="ContentOption", - full_name="google.privacy.dlp.v2.ContentOption", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="CONTENT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="CONTENT_TEXT", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CONTENT_IMAGE", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=32684, - serialized_end=32761, -) -_sym_db.RegisterEnumDescriptor(_CONTENTOPTION) - -ContentOption = enum_type_wrapper.EnumTypeWrapper(_CONTENTOPTION) -_METADATATYPE = _descriptor.EnumDescriptor( - name="MetadataType", - full_name="google.privacy.dlp.v2.MetadataType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="METADATATYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - 
name="STORAGE_METADATA", - index=1, - number=2, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=32763, - serialized_end=32829, -) -_sym_db.RegisterEnumDescriptor(_METADATATYPE) - -MetadataType = enum_type_wrapper.EnumTypeWrapper(_METADATATYPE) -_INFOTYPESUPPORTEDBY = _descriptor.EnumDescriptor( - name="InfoTypeSupportedBy", - full_name="google.privacy.dlp.v2.InfoTypeSupportedBy", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="ENUM_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="INSPECT", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RISK_ANALYSIS", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=32831, - serialized_end=32911, -) -_sym_db.RegisterEnumDescriptor(_INFOTYPESUPPORTEDBY) - -InfoTypeSupportedBy = enum_type_wrapper.EnumTypeWrapper(_INFOTYPESUPPORTEDBY) -_DLPJOBTYPE = _descriptor.EnumDescriptor( - name="DlpJobType", - full_name="google.privacy.dlp.v2.DlpJobType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="DLP_JOB_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="INSPECT_JOB", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RISK_ANALYSIS_JOB", - index=2, - number=2, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=32913, - serialized_end=32995, -) -_sym_db.RegisterEnumDescriptor(_DLPJOBTYPE) - -DlpJobType = enum_type_wrapper.EnumTypeWrapper(_DLPJOBTYPE) -_STOREDINFOTYPESTATE = _descriptor.EnumDescriptor( - name="StoredInfoTypeState", - 
full_name="google.privacy.dlp.v2.StoredInfoTypeState", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="STORED_INFO_TYPE_STATE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="PENDING", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="READY", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FAILED", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="INVALID", index=4, number=4, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=32997, - serialized_end=33107, -) -_sym_db.RegisterEnumDescriptor(_STOREDINFOTYPESTATE) - -StoredInfoTypeState = enum_type_wrapper.EnumTypeWrapper(_STOREDINFOTYPESTATE) -RELATIONAL_OPERATOR_UNSPECIFIED = 0 -EQUAL_TO = 1 -NOT_EQUAL_TO = 2 -GREATER_THAN = 3 -LESS_THAN = 4 -GREATER_THAN_OR_EQUALS = 5 -LESS_THAN_OR_EQUALS = 6 -EXISTS = 7 -MATCHING_TYPE_UNSPECIFIED = 0 -MATCHING_TYPE_FULL_MATCH = 1 -MATCHING_TYPE_PARTIAL_MATCH = 2 -MATCHING_TYPE_INVERSE_MATCH = 3 -CONTENT_UNSPECIFIED = 0 -CONTENT_TEXT = 1 -CONTENT_IMAGE = 2 -METADATATYPE_UNSPECIFIED = 0 -STORAGE_METADATA = 2 -ENUM_TYPE_UNSPECIFIED = 0 -INSPECT = 1 -RISK_ANALYSIS = 2 -DLP_JOB_TYPE_UNSPECIFIED = 0 -INSPECT_JOB = 1 -RISK_ANALYSIS_JOB = 2 -STORED_INFO_TYPE_STATE_UNSPECIFIED = 0 -PENDING = 1 -READY = 2 -FAILED = 3 -INVALID = 4 - - -_BYTECONTENTITEM_BYTESTYPE = _descriptor.EnumDescriptor( - name="BytesType", - full_name="google.privacy.dlp.v2.ByteContentItem.BytesType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="BYTES_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="IMAGE", index=1, number=6, serialized_options=None, type=None - 
), - _descriptor.EnumValueDescriptor( - name="IMAGE_JPEG", index=2, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="IMAGE_BMP", index=3, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="IMAGE_PNG", index=4, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="IMAGE_SVG", index=5, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="TEXT_UTF8", index=6, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="WORD_DOCUMENT", index=7, number=7, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PDF", index=8, number=8, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="AVRO", index=9, number=11, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1938, - serialized_end=2102, -) -_sym_db.RegisterEnumDescriptor(_BYTECONTENTITEM_BYTESTYPE) - -_OUTPUTSTORAGECONFIG_OUTPUTSCHEMA = _descriptor.EnumDescriptor( - name="OutputSchema", - full_name="google.privacy.dlp.v2.OutputStorageConfig.OutputSchema", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="OUTPUT_SCHEMA_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="BASIC_COLUMNS", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="GCS_COLUMNS", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DATASTORE_COLUMNS", - index=3, - number=3, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="BIG_QUERY_COLUMNS", - index=4, - number=4, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALL_COLUMNS", index=5, number=5, 
serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=6912, - serialized_end=7056, -) -_sym_db.RegisterEnumDescriptor(_OUTPUTSTORAGECONFIG_OUTPUTSCHEMA) - -_TIMEPARTCONFIG_TIMEPART = _descriptor.EnumDescriptor( - name="TimePart", - full_name="google.privacy.dlp.v2.TimePartConfig.TimePart", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="TIME_PART_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="YEAR", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="MONTH", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DAY_OF_MONTH", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DAY_OF_WEEK", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="WEEK_OF_YEAR", index=5, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="HOUR_OF_DAY", index=6, number=6, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=16851, - serialized_end=16979, -) -_sym_db.RegisterEnumDescriptor(_TIMEPARTCONFIG_TIMEPART) - -_CHARSTOIGNORE_COMMONCHARSTOIGNORE = _descriptor.EnumDescriptor( - name="CommonCharsToIgnore", - full_name="google.privacy.dlp.v2.CharsToIgnore.CommonCharsToIgnore", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="COMMON_CHARS_TO_IGNORE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="NUMERIC", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ALPHA_UPPER_CASE", - index=2, - number=2, - serialized_options=None, - type=None, - ), - 
_descriptor.EnumValueDescriptor( - name="ALPHA_LOWER_CASE", - index=3, - number=3, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="PUNCTUATION", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="WHITESPACE", index=5, number=5, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=17512, - serialized_end=17663, -) -_sym_db.RegisterEnumDescriptor(_CHARSTOIGNORE_COMMONCHARSTOIGNORE) - -_CRYPTOREPLACEFFXFPECONFIG_FFXCOMMONNATIVEALPHABET = _descriptor.EnumDescriptor( - name="FfxCommonNativeAlphabet", - full_name="google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="NUMERIC", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="HEXADECIMAL", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="UPPER_CASE_ALPHA_NUMERIC", - index=3, - number=3, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="ALPHA_NUMERIC", index=4, number=4, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=18596, - serialized_end=18744, -) -_sym_db.RegisterEnumDescriptor(_CRYPTOREPLACEFFXFPECONFIG_FFXCOMMONNATIVEALPHABET) - -_RECORDCONDITION_EXPRESSIONS_LOGICALOPERATOR = _descriptor.EnumDescriptor( - name="LogicalOperator", - full_name="google.privacy.dlp.v2.RecordCondition.Expressions.LogicalOperator", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="LOGICAL_OPERATOR_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - 
_descriptor.EnumValueDescriptor( - name="AND", index=1, number=1, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=20742, - serialized_end=20802, -) -_sym_db.RegisterEnumDescriptor(_RECORDCONDITION_EXPRESSIONS_LOGICALOPERATOR) - -_TRANSFORMATIONSUMMARY_TRANSFORMATIONRESULTCODE = _descriptor.EnumDescriptor( - name="TransformationResultCode", - full_name="google.privacy.dlp.v2.TransformationSummary.TransformationResultCode", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="TRANSFORMATION_RESULT_CODE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="SUCCESS", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ERROR", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=21524, - serialized_end=21618, -) -_sym_db.RegisterEnumDescriptor(_TRANSFORMATIONSUMMARY_TRANSFORMATIONRESULTCODE) - -_JOBTRIGGER_STATUS = _descriptor.EnumDescriptor( - name="Status", - full_name="google.privacy.dlp.v2.JobTrigger.Status", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="STATUS_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="HEALTHY", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PAUSED", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CANCELLED", index=3, number=3, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=23608, - serialized_end=23680, -) -_sym_db.RegisterEnumDescriptor(_JOBTRIGGER_STATUS) - -_DLPJOB_JOBSTATE = _descriptor.EnumDescriptor( - name="JobState", - 
full_name="google.privacy.dlp.v2.DlpJob.JobState", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="JOB_STATE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="PENDING", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RUNNING", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="DONE", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="CANCELED", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="FAILED", index=5, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="ACTIVE", index=6, number=6, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=27393, - serialized_end=27504, -) -_sym_db.RegisterEnumDescriptor(_DLPJOB_JOBSTATE) - - -_EXCLUDEINFOTYPES = _descriptor.Descriptor( - name="ExcludeInfoTypes", - full_name="google.privacy.dlp.v2.ExcludeInfoTypes", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="info_types", - full_name="google.privacy.dlp.v2.ExcludeInfoTypes.info_types", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=453, - serialized_end=524, -) - - -_EXCLUSIONRULE = _descriptor.Descriptor( - name="ExclusionRule", - full_name="google.privacy.dlp.v2.ExclusionRule", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="dictionary", - full_name="google.privacy.dlp.v2.ExclusionRule.dictionary", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="regex", - full_name="google.privacy.dlp.v2.ExclusionRule.regex", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exclude_info_types", - full_name="google.privacy.dlp.v2.ExclusionRule.exclude_info_types", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="matching_type", - full_name="google.privacy.dlp.v2.ExclusionRule.matching_type", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="google.privacy.dlp.v2.ExclusionRule.type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=527, - serialized_end=815, -) - - -_INSPECTIONRULE = 
_descriptor.Descriptor( - name="InspectionRule", - full_name="google.privacy.dlp.v2.InspectionRule", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="hotword_rule", - full_name="google.privacy.dlp.v2.InspectionRule.hotword_rule", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exclusion_rule", - full_name="google.privacy.dlp.v2.InspectionRule.exclusion_rule", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="google.privacy.dlp.v2.InspectionRule.type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=818, - serialized_end=995, -) - - -_INSPECTIONRULESET = _descriptor.Descriptor( - name="InspectionRuleSet", - full_name="google.privacy.dlp.v2.InspectionRuleSet", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="info_types", - full_name="google.privacy.dlp.v2.InspectionRuleSet.info_types", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="rules", - 
full_name="google.privacy.dlp.v2.InspectionRuleSet.rules", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=997, - serialized_end=1123, -) - - -_INSPECTCONFIG_FINDINGLIMITS_INFOTYPELIMIT = _descriptor.Descriptor( - name="InfoTypeLimit", - full_name="google.privacy.dlp.v2.InspectConfig.FindingLimits.InfoTypeLimit", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="info_type", - full_name="google.privacy.dlp.v2.InspectConfig.FindingLimits.InfoTypeLimit.info_type", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max_findings", - full_name="google.privacy.dlp.v2.InspectConfig.FindingLimits.InfoTypeLimit.max_findings", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1748, - serialized_end=1837, -) - -_INSPECTCONFIG_FINDINGLIMITS = _descriptor.Descriptor( - name="FindingLimits", - full_name="google.privacy.dlp.v2.InspectConfig.FindingLimits", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="max_findings_per_item", - full_name="google.privacy.dlp.v2.InspectConfig.FindingLimits.max_findings_per_item", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max_findings_per_request", - full_name="google.privacy.dlp.v2.InspectConfig.FindingLimits.max_findings_per_request", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max_findings_per_info_type", - full_name="google.privacy.dlp.v2.InspectConfig.FindingLimits.max_findings_per_info_type", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_INSPECTCONFIG_FINDINGLIMITS_INFOTYPELIMIT], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1564, - serialized_end=1837, -) - -_INSPECTCONFIG = _descriptor.Descriptor( - name="InspectConfig", - full_name="google.privacy.dlp.v2.InspectConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="info_types", - full_name="google.privacy.dlp.v2.InspectConfig.info_types", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="min_likelihood", - full_name="google.privacy.dlp.v2.InspectConfig.min_likelihood", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="limits", - full_name="google.privacy.dlp.v2.InspectConfig.limits", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="include_quote", - full_name="google.privacy.dlp.v2.InspectConfig.include_quote", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exclude_info_types", - full_name="google.privacy.dlp.v2.InspectConfig.exclude_info_types", - index=4, - number=5, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="custom_info_types", - full_name="google.privacy.dlp.v2.InspectConfig.custom_info_types", - index=5, - number=6, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="content_options", - full_name="google.privacy.dlp.v2.InspectConfig.content_options", - index=6, - number=8, - type=14, - cpp_type=8, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="rule_set", - full_name="google.privacy.dlp.v2.InspectConfig.rule_set", - index=7, - number=10, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_INSPECTCONFIG_FINDINGLIMITS], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1126, - serialized_end=1837, -) - - -_BYTECONTENTITEM = _descriptor.Descriptor( - name="ByteContentItem", - full_name="google.privacy.dlp.v2.ByteContentItem", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="type", - full_name="google.privacy.dlp.v2.ByteContentItem.type", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="data", - full_name="google.privacy.dlp.v2.ByteContentItem.data", - index=1, - number=2, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_BYTECONTENTITEM_BYTESTYPE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1840, - serialized_end=2102, -) - - -_CONTENTITEM = _descriptor.Descriptor( - name="ContentItem", - full_name="google.privacy.dlp.v2.ContentItem", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="value", - full_name="google.privacy.dlp.v2.ContentItem.value", - index=0, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="table", - full_name="google.privacy.dlp.v2.ContentItem.table", - index=1, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="byte_item", - full_name="google.privacy.dlp.v2.ContentItem.byte_item", - index=2, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="data_item", - full_name="google.privacy.dlp.v2.ContentItem.data_item", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=2105, - 
serialized_end=2256, -) - - -_TABLE_ROW = _descriptor.Descriptor( - name="Row", - full_name="google.privacy.dlp.v2.Table.Row", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="values", - full_name="google.privacy.dlp.v2.Table.Row.values", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2365, - serialized_end=2416, -) - -_TABLE = _descriptor.Descriptor( - name="Table", - full_name="google.privacy.dlp.v2.Table", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="headers", - full_name="google.privacy.dlp.v2.Table.headers", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="rows", - full_name="google.privacy.dlp.v2.Table.rows", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_TABLE_ROW], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2259, - serialized_end=2416, -) - - -_INSPECTRESULT = _descriptor.Descriptor( - name="InspectResult", - 
full_name="google.privacy.dlp.v2.InspectResult", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="findings", - full_name="google.privacy.dlp.v2.InspectResult.findings", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="findings_truncated", - full_name="google.privacy.dlp.v2.InspectResult.findings_truncated", - index=1, - number=2, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2418, - serialized_end=2511, -) - - -_FINDING_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.privacy.dlp.v2.Finding.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.privacy.dlp.v2.Finding.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.privacy.dlp.v2.Finding.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3091, - serialized_end=3136, -) - -_FINDING = _descriptor.Descriptor( - name="Finding", - full_name="google.privacy.dlp.v2.Finding", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.privacy.dlp.v2.Finding.name", - index=0, - number=14, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="quote", - full_name="google.privacy.dlp.v2.Finding.quote", - index=1, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="info_type", - full_name="google.privacy.dlp.v2.Finding.info_type", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="likelihood", - full_name="google.privacy.dlp.v2.Finding.likelihood", - index=3, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - 
_descriptor.FieldDescriptor( - name="location", - full_name="google.privacy.dlp.v2.Finding.location", - index=4, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.privacy.dlp.v2.Finding.create_time", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="quote_info", - full_name="google.privacy.dlp.v2.Finding.quote_info", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="resource_name", - full_name="google.privacy.dlp.v2.Finding.resource_name", - index=7, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\372A\033\n\031dlp.googleapis.com/DlpJob", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="trigger_name", - full_name="google.privacy.dlp.v2.Finding.trigger_name", - index=8, - number=9, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\372A\037\n\035dlp.googleapis.com/JobTrigger", - file=DESCRIPTOR, - ), 
- _descriptor.FieldDescriptor( - name="labels", - full_name="google.privacy.dlp.v2.Finding.labels", - index=9, - number=10, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_create_time", - full_name="google.privacy.dlp.v2.Finding.job_create_time", - index=10, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_name", - full_name="google.privacy.dlp.v2.Finding.job_name", - index=11, - number=13, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\372A\033\n\031dlp.googleapis.com/DlpJob", - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_FINDING_LABELSENTRY], - enum_types=[], - serialized_options=b"\352AX\n\032dlp.googleapis.com/Finding\022:projects/{project}/locations/{location}/findings/{finding}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2514, - serialized_end=3229, -) - - -_LOCATION = _descriptor.Descriptor( - name="Location", - full_name="google.privacy.dlp.v2.Location", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="byte_range", - full_name="google.privacy.dlp.v2.Location.byte_range", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="codepoint_range", - full_name="google.privacy.dlp.v2.Location.codepoint_range", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="content_locations", - full_name="google.privacy.dlp.v2.Location.content_locations", - index=2, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="container", - full_name="google.privacy.dlp.v2.Location.container", - index=3, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3232, - serialized_end=3467, -) - - -_CONTENTLOCATION = _descriptor.Descriptor( - name="ContentLocation", - full_name="google.privacy.dlp.v2.ContentLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="container_name", - full_name="google.privacy.dlp.v2.ContentLocation.container_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="record_location", - full_name="google.privacy.dlp.v2.ContentLocation.record_location", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="image_location", - full_name="google.privacy.dlp.v2.ContentLocation.image_location", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="document_location", - full_name="google.privacy.dlp.v2.ContentLocation.document_location", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="metadata_location", - full_name="google.privacy.dlp.v2.ContentLocation.metadata_location", - index=4, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="container_timestamp", - full_name="google.privacy.dlp.v2.ContentLocation.container_timestamp", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="container_version", - full_name="google.privacy.dlp.v2.ContentLocation.container_version", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="location", - full_name="google.privacy.dlp.v2.ContentLocation.location", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=3470, - serialized_end=3877, -) - - -_METADATALOCATION = _descriptor.Descriptor( - name="MetadataLocation", - full_name="google.privacy.dlp.v2.MetadataLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="type", - full_name="google.privacy.dlp.v2.MetadataLocation.type", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="storage_label", - full_name="google.privacy.dlp.v2.MetadataLocation.storage_label", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - 
name="label", - full_name="google.privacy.dlp.v2.MetadataLocation.label", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=3880, - serialized_end=4028, -) - - -_STORAGEMETADATALABEL = _descriptor.Descriptor( - name="StorageMetadataLabel", - full_name="google.privacy.dlp.v2.StorageMetadataLabel", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.privacy.dlp.v2.StorageMetadataLabel.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4030, - serialized_end=4065, -) - - -_DOCUMENTLOCATION = _descriptor.Descriptor( - name="DocumentLocation", - full_name="google.privacy.dlp.v2.DocumentLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="file_offset", - full_name="google.privacy.dlp.v2.DocumentLocation.file_offset", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4067, - serialized_end=4106, -) - - -_RECORDLOCATION = _descriptor.Descriptor( - name="RecordLocation", - full_name="google.privacy.dlp.v2.RecordLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name="record_key", - full_name="google.privacy.dlp.v2.RecordLocation.record_key", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="field_id", - full_name="google.privacy.dlp.v2.RecordLocation.field_id", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="table_location", - full_name="google.privacy.dlp.v2.RecordLocation.table_location", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4109, - serialized_end=4291, -) - - -_TABLELOCATION = _descriptor.Descriptor( - name="TableLocation", - full_name="google.privacy.dlp.v2.TableLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="row_index", - full_name="google.privacy.dlp.v2.TableLocation.row_index", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - 
serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4293, - serialized_end=4327, -) - - -_CONTAINER = _descriptor.Descriptor( - name="Container", - full_name="google.privacy.dlp.v2.Container", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="type", - full_name="google.privacy.dlp.v2.Container.type", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.privacy.dlp.v2.Container.project_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="full_path", - full_name="google.privacy.dlp.v2.Container.full_path", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="root_path", - full_name="google.privacy.dlp.v2.Container.root_path", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="relative_path", - 
full_name="google.privacy.dlp.v2.Container.relative_path", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.privacy.dlp.v2.Container.update_time", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="version", - full_name="google.privacy.dlp.v2.Container.version", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4330, - serialized_end=4502, -) - - -_RANGE = _descriptor.Descriptor( - name="Range", - full_name="google.privacy.dlp.v2.Range", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="start", - full_name="google.privacy.dlp.v2.Range.start", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end", - full_name="google.privacy.dlp.v2.Range.end", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, 
- has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4504, - serialized_end=4539, -) - - -_IMAGELOCATION = _descriptor.Descriptor( - name="ImageLocation", - full_name="google.privacy.dlp.v2.ImageLocation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="bounding_boxes", - full_name="google.privacy.dlp.v2.ImageLocation.bounding_boxes", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4541, - serialized_end=4616, -) - - -_BOUNDINGBOX = _descriptor.Descriptor( - name="BoundingBox", - full_name="google.privacy.dlp.v2.BoundingBox", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="top", - full_name="google.privacy.dlp.v2.BoundingBox.top", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="left", - full_name="google.privacy.dlp.v2.BoundingBox.left", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="width", - full_name="google.privacy.dlp.v2.BoundingBox.width", - index=2, - number=3, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="height", - full_name="google.privacy.dlp.v2.BoundingBox.height", - index=3, - number=4, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4618, - serialized_end=4689, -) - - -_REDACTIMAGEREQUEST_IMAGEREDACTIONCONFIG = _descriptor.Descriptor( - name="ImageRedactionConfig", - full_name="google.privacy.dlp.v2.RedactImageRequest.ImageRedactionConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="info_type", - full_name="google.privacy.dlp.v2.RedactImageRequest.ImageRedactionConfig.info_type", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="redact_all_text", - full_name="google.privacy.dlp.v2.RedactImageRequest.ImageRedactionConfig.redact_all_text", - index=1, - number=2, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - 
default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="redaction_color", - full_name="google.privacy.dlp.v2.RedactImageRequest.ImageRedactionConfig.redaction_color", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="target", - full_name="google.privacy.dlp.v2.RedactImageRequest.ImageRedactionConfig.target", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=5046, - serialized_end=5214, -) - -_REDACTIMAGEREQUEST = _descriptor.Descriptor( - name="RedactImageRequest", - full_name="google.privacy.dlp.v2.RedactImageRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.privacy.dlp.v2.RedactImageRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="location_id", - full_name="google.privacy.dlp.v2.RedactImageRequest.location_id", - index=1, - number=8, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="inspect_config", - full_name="google.privacy.dlp.v2.RedactImageRequest.inspect_config", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="image_redaction_configs", - full_name="google.privacy.dlp.v2.RedactImageRequest.image_redaction_configs", - index=3, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="include_findings", - full_name="google.privacy.dlp.v2.RedactImageRequest.include_findings", - index=4, - number=6, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="byte_item", - full_name="google.privacy.dlp.v2.RedactImageRequest.byte_item", - index=5, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_REDACTIMAGEREQUEST_IMAGEREDACTIONCONFIG], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4692, - serialized_end=5214, -) - - -_COLOR = _descriptor.Descriptor( - name="Color", - 
full_name="google.privacy.dlp.v2.Color", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="red", - full_name="google.privacy.dlp.v2.Color.red", - index=0, - number=1, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="green", - full_name="google.privacy.dlp.v2.Color.green", - index=1, - number=2, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="blue", - full_name="google.privacy.dlp.v2.Color.blue", - index=2, - number=3, - type=2, - cpp_type=6, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5216, - serialized_end=5265, -) - - -_REDACTIMAGERESPONSE = _descriptor.Descriptor( - name="RedactImageResponse", - full_name="google.privacy.dlp.v2.RedactImageResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="redacted_image", - full_name="google.privacy.dlp.v2.RedactImageResponse.redacted_image", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="extracted_text", - full_name="google.privacy.dlp.v2.RedactImageResponse.extracted_text", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="inspect_result", - full_name="google.privacy.dlp.v2.RedactImageResponse.inspect_result", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5268, - serialized_end=5399, -) - - -_DEIDENTIFYCONTENTREQUEST = _descriptor.Descriptor( - name="DeidentifyContentRequest", - full_name="google.privacy.dlp.v2.DeidentifyContentRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.privacy.dlp.v2.DeidentifyContentRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="deidentify_config", - full_name="google.privacy.dlp.v2.DeidentifyContentRequest.deidentify_config", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="inspect_config", - full_name="google.privacy.dlp.v2.DeidentifyContentRequest.inspect_config", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="item", - full_name="google.privacy.dlp.v2.DeidentifyContentRequest.item", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="inspect_template_name", - full_name="google.privacy.dlp.v2.DeidentifyContentRequest.inspect_template_name", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="deidentify_template_name", - full_name="google.privacy.dlp.v2.DeidentifyContentRequest.deidentify_template_name", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="location_id", - full_name="google.privacy.dlp.v2.DeidentifyContentRequest.location_id", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - 
has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5402, - serialized_end=5760, -) - - -_DEIDENTIFYCONTENTRESPONSE = _descriptor.Descriptor( - name="DeidentifyContentResponse", - full_name="google.privacy.dlp.v2.DeidentifyContentResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="item", - full_name="google.privacy.dlp.v2.DeidentifyContentResponse.item", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="overview", - full_name="google.privacy.dlp.v2.DeidentifyContentResponse.overview", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5763, - serialized_end=5905, -) - - -_REIDENTIFYCONTENTREQUEST = _descriptor.Descriptor( - name="ReidentifyContentRequest", - full_name="google.privacy.dlp.v2.ReidentifyContentRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.privacy.dlp.v2.ReidentifyContentRequest.parent", - 
index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="reidentify_config", - full_name="google.privacy.dlp.v2.ReidentifyContentRequest.reidentify_config", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="inspect_config", - full_name="google.privacy.dlp.v2.ReidentifyContentRequest.inspect_config", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="item", - full_name="google.privacy.dlp.v2.ReidentifyContentRequest.item", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="inspect_template_name", - full_name="google.privacy.dlp.v2.ReidentifyContentRequest.inspect_template_name", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="reidentify_template_name", - full_name="google.privacy.dlp.v2.ReidentifyContentRequest.reidentify_template_name", - index=5, - number=6, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="location_id", - full_name="google.privacy.dlp.v2.ReidentifyContentRequest.location_id", - index=6, - number=7, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=5908, - serialized_end=6269, -) - - -_REIDENTIFYCONTENTRESPONSE = _descriptor.Descriptor( - name="ReidentifyContentResponse", - full_name="google.privacy.dlp.v2.ReidentifyContentResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="item", - full_name="google.privacy.dlp.v2.ReidentifyContentResponse.item", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="overview", - full_name="google.privacy.dlp.v2.ReidentifyContentResponse.overview", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6272, - serialized_end=6414, -) - - -_INSPECTCONTENTREQUEST = _descriptor.Descriptor( - name="InspectContentRequest", - full_name="google.privacy.dlp.v2.InspectContentRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.privacy.dlp.v2.InspectContentRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="inspect_config", - full_name="google.privacy.dlp.v2.InspectContentRequest.inspect_config", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="item", - full_name="google.privacy.dlp.v2.InspectContentRequest.item", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="inspect_template_name", - full_name="google.privacy.dlp.v2.InspectContentRequest.inspect_template_name", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="location_id", - full_name="google.privacy.dlp.v2.InspectContentRequest.location_id", - index=4, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6417, - serialized_end=6670, -) - - -_INSPECTCONTENTRESPONSE = _descriptor.Descriptor( - name="InspectContentResponse", - full_name="google.privacy.dlp.v2.InspectContentResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="result", - full_name="google.privacy.dlp.v2.InspectContentResponse.result", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=6672, - serialized_end=6750, -) - - -_OUTPUTSTORAGECONFIG = _descriptor.Descriptor( - name="OutputStorageConfig", - full_name="google.privacy.dlp.v2.OutputStorageConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="table", - full_name="google.privacy.dlp.v2.OutputStorageConfig.table", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - 
default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="output_schema", - full_name="google.privacy.dlp.v2.OutputStorageConfig.output_schema", - index=1, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_OUTPUTSTORAGECONFIG_OUTPUTSCHEMA], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="google.privacy.dlp.v2.OutputStorageConfig.type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=6753, - serialized_end=7064, -) - - -_INFOTYPESTATS = _descriptor.Descriptor( - name="InfoTypeStats", - full_name="google.privacy.dlp.v2.InfoTypeStats", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="info_type", - full_name="google.privacy.dlp.v2.InfoTypeStats.info_type", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="count", - full_name="google.privacy.dlp.v2.InfoTypeStats.count", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - 
serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7066, - serialized_end=7148, -) - - -_INSPECTDATASOURCEDETAILS_REQUESTEDOPTIONS = _descriptor.Descriptor( - name="RequestedOptions", - full_name="google.privacy.dlp.v2.InspectDataSourceDetails.RequestedOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="snapshot_inspect_template", - full_name="google.privacy.dlp.v2.InspectDataSourceDetails.RequestedOptions.snapshot_inspect_template", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="job_config", - full_name="google.privacy.dlp.v2.InspectDataSourceDetails.RequestedOptions.job_config", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7345, - serialized_end=7499, -) - -_INSPECTDATASOURCEDETAILS_RESULT = _descriptor.Descriptor( - name="Result", - full_name="google.privacy.dlp.v2.InspectDataSourceDetails.Result", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="processed_bytes", - full_name="google.privacy.dlp.v2.InspectDataSourceDetails.Result.processed_bytes", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, 
- is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="total_estimated_bytes", - full_name="google.privacy.dlp.v2.InspectDataSourceDetails.Result.total_estimated_bytes", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="info_type_stats", - full_name="google.privacy.dlp.v2.InspectDataSourceDetails.Result.info_type_stats", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="hybrid_stats", - full_name="google.privacy.dlp.v2.InspectDataSourceDetails.Result.hybrid_stats", - index=3, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7502, - serialized_end=7699, -) - -_INSPECTDATASOURCEDETAILS = _descriptor.Descriptor( - name="InspectDataSourceDetails", - full_name="google.privacy.dlp.v2.InspectDataSourceDetails", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="requested_options", - full_name="google.privacy.dlp.v2.InspectDataSourceDetails.requested_options", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - 
default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="result", - full_name="google.privacy.dlp.v2.InspectDataSourceDetails.result", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _INSPECTDATASOURCEDETAILS_REQUESTEDOPTIONS, - _INSPECTDATASOURCEDETAILS_RESULT, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7151, - serialized_end=7699, -) - - -_HYBRIDINSPECTSTATISTICS = _descriptor.Descriptor( - name="HybridInspectStatistics", - full_name="google.privacy.dlp.v2.HybridInspectStatistics", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="processed_count", - full_name="google.privacy.dlp.v2.HybridInspectStatistics.processed_count", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="aborted_count", - full_name="google.privacy.dlp.v2.HybridInspectStatistics.aborted_count", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="pending_count", - 
full_name="google.privacy.dlp.v2.HybridInspectStatistics.pending_count", - index=2, - number=3, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7701, - serialized_end=7797, -) - - -_INFOTYPEDESCRIPTION = _descriptor.Descriptor( - name="InfoTypeDescription", - full_name="google.privacy.dlp.v2.InfoTypeDescription", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.privacy.dlp.v2.InfoTypeDescription.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.privacy.dlp.v2.InfoTypeDescription.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="supported_by", - full_name="google.privacy.dlp.v2.InfoTypeDescription.supported_by", - index=2, - number=3, - type=14, - cpp_type=8, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", 
- full_name="google.privacy.dlp.v2.InfoTypeDescription.description", - index=3, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7800, - serialized_end=7944, -) - - -_LISTINFOTYPESREQUEST = _descriptor.Descriptor( - name="ListInfoTypesRequest", - full_name="google.privacy.dlp.v2.ListInfoTypesRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.privacy.dlp.v2.ListInfoTypesRequest.parent", - index=0, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="language_code", - full_name="google.privacy.dlp.v2.ListInfoTypesRequest.language_code", - index=1, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="filter", - full_name="google.privacy.dlp.v2.ListInfoTypesRequest.filter", - index=2, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - 
_descriptor.FieldDescriptor( - name="location_id", - full_name="google.privacy.dlp.v2.ListInfoTypesRequest.location_id", - index=3, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=7946, - serialized_end=8044, -) - - -_LISTINFOTYPESRESPONSE = _descriptor.Descriptor( - name="ListInfoTypesResponse", - full_name="google.privacy.dlp.v2.ListInfoTypesResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="info_types", - full_name="google.privacy.dlp.v2.ListInfoTypesResponse.info_types", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=8046, - serialized_end=8133, -) - - -_RISKANALYSISJOBCONFIG = _descriptor.Descriptor( - name="RiskAnalysisJobConfig", - full_name="google.privacy.dlp.v2.RiskAnalysisJobConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="privacy_metric", - full_name="google.privacy.dlp.v2.RiskAnalysisJobConfig.privacy_metric", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, 
- serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="source_table", - full_name="google.privacy.dlp.v2.RiskAnalysisJobConfig.source_table", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="actions", - full_name="google.privacy.dlp.v2.RiskAnalysisJobConfig.actions", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=8136, - serialized_end=8329, -) - - -_QUASIID = _descriptor.Descriptor( - name="QuasiId", - full_name="google.privacy.dlp.v2.QuasiId", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="field", - full_name="google.privacy.dlp.v2.QuasiId.field", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="info_type", - full_name="google.privacy.dlp.v2.QuasiId.info_type", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="custom_tag", - full_name="google.privacy.dlp.v2.QuasiId.custom_tag", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="inferred", - full_name="google.privacy.dlp.v2.QuasiId.inferred", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="tag", - full_name="google.privacy.dlp.v2.QuasiId.tag", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=8332, - serialized_end=8520, -) - - -_STATISTICALTABLE_QUASIIDENTIFIERFIELD = _descriptor.Descriptor( - name="QuasiIdentifierField", - full_name="google.privacy.dlp.v2.StatisticalTable.QuasiIdentifierField", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="field", - full_name="google.privacy.dlp.v2.StatisticalTable.QuasiIdentifierField.field", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="custom_tag", - full_name="google.privacy.dlp.v2.StatisticalTable.QuasiIdentifierField.custom_tag", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=8752, - serialized_end=8841, -) - -_STATISTICALTABLE = _descriptor.Descriptor( - name="StatisticalTable", - full_name="google.privacy.dlp.v2.StatisticalTable", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="table", - full_name="google.privacy.dlp.v2.StatisticalTable.table", - index=0, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="quasi_ids", - full_name="google.privacy.dlp.v2.StatisticalTable.quasi_ids", - index=1, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="relative_frequency", - full_name="google.privacy.dlp.v2.StatisticalTable.relative_frequency", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_STATISTICALTABLE_QUASIIDENTIFIERFIELD], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=8523, - 
serialized_end=8841, -) - - -_PRIVACYMETRIC_NUMERICALSTATSCONFIG = _descriptor.Descriptor( - name="NumericalStatsConfig", - full_name="google.privacy.dlp.v2.PrivacyMetric.NumericalStatsConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="field", - full_name="google.privacy.dlp.v2.PrivacyMetric.NumericalStatsConfig.field", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=9427, - serialized_end=9496, -) - -_PRIVACYMETRIC_CATEGORICALSTATSCONFIG = _descriptor.Descriptor( - name="CategoricalStatsConfig", - full_name="google.privacy.dlp.v2.PrivacyMetric.CategoricalStatsConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="field", - full_name="google.privacy.dlp.v2.PrivacyMetric.CategoricalStatsConfig.field", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=9498, - serialized_end=9569, -) - -_PRIVACYMETRIC_KANONYMITYCONFIG = _descriptor.Descriptor( - name="KAnonymityConfig", - full_name="google.privacy.dlp.v2.PrivacyMetric.KAnonymityConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - 
name="quasi_ids", - full_name="google.privacy.dlp.v2.PrivacyMetric.KAnonymityConfig.quasi_ids", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="entity_id", - full_name="google.privacy.dlp.v2.PrivacyMetric.KAnonymityConfig.entity_id", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=9571, - serialized_end=9692, -) - -_PRIVACYMETRIC_LDIVERSITYCONFIG = _descriptor.Descriptor( - name="LDiversityConfig", - full_name="google.privacy.dlp.v2.PrivacyMetric.LDiversityConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="quasi_ids", - full_name="google.privacy.dlp.v2.PrivacyMetric.LDiversityConfig.quasi_ids", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="sensitive_attribute", - full_name="google.privacy.dlp.v2.PrivacyMetric.LDiversityConfig.sensitive_attribute", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=9695, - serialized_end=9825, -) - -_PRIVACYMETRIC_KMAPESTIMATIONCONFIG_TAGGEDFIELD = _descriptor.Descriptor( - name="TaggedField", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.TaggedField", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="field", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.TaggedField.field", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="info_type", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.TaggedField.info_type", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="custom_tag", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.TaggedField.custom_tag", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="inferred", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.TaggedField.inferred", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, 
- message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="tag", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.TaggedField.tag", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=10069, - serialized_end=10261, -) - -_PRIVACYMETRIC_KMAPESTIMATIONCONFIG_AUXILIARYTABLE_QUASIIDFIELD = _descriptor.Descriptor( - name="QuasiIdField", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="field", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField.field", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="custom_tag", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField.custom_tag", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=10516, - serialized_end=10597, -) - -_PRIVACYMETRIC_KMAPESTIMATIONCONFIG_AUXILIARYTABLE = 
_descriptor.Descriptor( - name="AuxiliaryTable", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="table", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.table", - index=0, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="quasi_ids", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.quasi_ids", - index=1, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="relative_frequency", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.relative_frequency", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_PRIVACYMETRIC_KMAPESTIMATIONCONFIG_AUXILIARYTABLE_QUASIIDFIELD], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=10264, - serialized_end=10597, -) - -_PRIVACYMETRIC_KMAPESTIMATIONCONFIG = _descriptor.Descriptor( - name="KMapEstimationConfig", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - 
fields=[ - _descriptor.FieldDescriptor( - name="quasi_ids", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.quasi_ids", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region_code", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.region_code", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="auxiliary_tables", - full_name="google.privacy.dlp.v2.PrivacyMetric.KMapEstimationConfig.auxiliary_tables", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _PRIVACYMETRIC_KMAPESTIMATIONCONFIG_TAGGEDFIELD, - _PRIVACYMETRIC_KMAPESTIMATIONCONFIG_AUXILIARYTABLE, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=9828, - serialized_end=10597, -) - -_PRIVACYMETRIC_DELTAPRESENCEESTIMATIONCONFIG = _descriptor.Descriptor( - name="DeltaPresenceEstimationConfig", - full_name="google.privacy.dlp.v2.PrivacyMetric.DeltaPresenceEstimationConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="quasi_ids", - full_name="google.privacy.dlp.v2.PrivacyMetric.DeltaPresenceEstimationConfig.quasi_ids", - index=0, - 
number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="region_code", - full_name="google.privacy.dlp.v2.PrivacyMetric.DeltaPresenceEstimationConfig.region_code", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="auxiliary_tables", - full_name="google.privacy.dlp.v2.PrivacyMetric.DeltaPresenceEstimationConfig.auxiliary_tables", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=10600, - serialized_end=10775, -) - -_PRIVACYMETRIC = _descriptor.Descriptor( - name="PrivacyMetric", - full_name="google.privacy.dlp.v2.PrivacyMetric", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="numerical_stats_config", - full_name="google.privacy.dlp.v2.PrivacyMetric.numerical_stats_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="categorical_stats_config", - 
full_name="google.privacy.dlp.v2.PrivacyMetric.categorical_stats_config", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="k_anonymity_config", - full_name="google.privacy.dlp.v2.PrivacyMetric.k_anonymity_config", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="l_diversity_config", - full_name="google.privacy.dlp.v2.PrivacyMetric.l_diversity_config", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="k_map_estimation_config", - full_name="google.privacy.dlp.v2.PrivacyMetric.k_map_estimation_config", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="delta_presence_estimation_config", - full_name="google.privacy.dlp.v2.PrivacyMetric.delta_presence_estimation_config", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], 
- nested_types=[ - _PRIVACYMETRIC_NUMERICALSTATSCONFIG, - _PRIVACYMETRIC_CATEGORICALSTATSCONFIG, - _PRIVACYMETRIC_KANONYMITYCONFIG, - _PRIVACYMETRIC_LDIVERSITYCONFIG, - _PRIVACYMETRIC_KMAPESTIMATIONCONFIG, - _PRIVACYMETRIC_DELTAPRESENCEESTIMATIONCONFIG, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="google.privacy.dlp.v2.PrivacyMetric.type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=8844, - serialized_end=10783, -) - - -_ANALYZEDATASOURCERISKDETAILS_NUMERICALSTATSRESULT = _descriptor.Descriptor( - name="NumericalStatsResult", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.NumericalStatsResult", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="min_value", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.NumericalStatsResult.min_value", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max_value", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.NumericalStatsResult.max_value", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="quantile_values", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.NumericalStatsResult.quantile_values", - index=2, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=11617, - serialized_end=11792, -) - -_ANALYZEDATASOURCERISKDETAILS_CATEGORICALSTATSRESULT_CATEGORICALSTATSHISTOGRAMBUCKET = _descriptor.Descriptor( - name="CategoricalStatsHistogramBucket", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="value_frequency_lower_bound", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket.value_frequency_lower_bound", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value_frequency_upper_bound", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket.value_frequency_upper_bound", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_size", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket.bucket_size", - index=2, - number=3, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_values", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket.bucket_values", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_value_count", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket.bucket_value_count", - index=4, - number=5, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=11974, - serialized_end=12192, -) - -_ANALYZEDATASOURCERISKDETAILS_CATEGORICALSTATSRESULT = _descriptor.Descriptor( - name="CategoricalStatsResult", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResult", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="value_frequency_histogram_buckets", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.value_frequency_histogram_buckets", - index=0, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], 
- extensions=[], - nested_types=[ - _ANALYZEDATASOURCERISKDETAILS_CATEGORICALSTATSRESULT_CATEGORICALSTATSHISTOGRAMBUCKET - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=11795, - serialized_end=12192, -) - -_ANALYZEDATASOURCERISKDETAILS_KANONYMITYRESULT_KANONYMITYEQUIVALENCECLASS = _descriptor.Descriptor( - name="KAnonymityEquivalenceClass", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="quasi_ids_values", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass.quasi_ids_values", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="equivalence_class_size", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass.equivalence_class_size", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=12357, - serialized_end=12473, -) - -_ANALYZEDATASOURCERISKDETAILS_KANONYMITYRESULT_KANONYMITYHISTOGRAMBUCKET = _descriptor.Descriptor( - name="KAnonymityHistogramBucket", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket", - 
filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="equivalence_class_size_lower_bound", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket.equivalence_class_size_lower_bound", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="equivalence_class_size_upper_bound", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket.equivalence_class_size_upper_bound", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_size", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket.bucket_size", - index=2, - number=3, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_values", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket.bucket_values", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_value_count", - 
full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket.bucket_value_count", - index=4, - number=5, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=12476, - serialized_end=12760, -) - -_ANALYZEDATASOURCERISKDETAILS_KANONYMITYRESULT = _descriptor.Descriptor( - name="KAnonymityResult", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="equivalence_class_histogram_buckets", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KAnonymityResult.equivalence_class_histogram_buckets", - index=0, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[ - _ANALYZEDATASOURCERISKDETAILS_KANONYMITYRESULT_KANONYMITYEQUIVALENCECLASS, - _ANALYZEDATASOURCERISKDETAILS_KANONYMITYRESULT_KANONYMITYHISTOGRAMBUCKET, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=12195, - serialized_end=12760, -) - -_ANALYZEDATASOURCERISKDETAILS_LDIVERSITYRESULT_LDIVERSITYEQUIVALENCECLASS = _descriptor.Descriptor( - name="LDiversityEquivalenceClass", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass", - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="quasi_ids_values", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass.quasi_ids_values", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="equivalence_class_size", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass.equivalence_class_size", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="num_distinct_sensitive_values", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass.num_distinct_sensitive_values", - index=2, - number=3, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="top_sensitive_values", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass.top_sensitive_values", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - 
syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=12934, - serialized_end=13158, -) - -_ANALYZEDATASOURCERISKDETAILS_LDIVERSITYRESULT_LDIVERSITYHISTOGRAMBUCKET = _descriptor.Descriptor( - name="LDiversityHistogramBucket", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sensitive_value_frequency_lower_bound", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket.sensitive_value_frequency_lower_bound", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="sensitive_value_frequency_upper_bound", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket.sensitive_value_frequency_upper_bound", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_size", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket.bucket_size", - index=2, - number=3, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_values", - 
full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket.bucket_values", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_value_count", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket.bucket_value_count", - index=4, - number=5, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=13161, - serialized_end=13451, -) - -_ANALYZEDATASOURCERISKDETAILS_LDIVERSITYRESULT = _descriptor.Descriptor( - name="LDiversityResult", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="sensitive_value_frequency_histogram_buckets", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.LDiversityResult.sensitive_value_frequency_histogram_buckets", - index=0, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[ - _ANALYZEDATASOURCERISKDETAILS_LDIVERSITYRESULT_LDIVERSITYEQUIVALENCECLASS, - _ANALYZEDATASOURCERISKDETAILS_LDIVERSITYRESULT_LDIVERSITYHISTOGRAMBUCKET, - ], - 
enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=12763, - serialized_end=13451, -) - -_ANALYZEDATASOURCERISKDETAILS_KMAPESTIMATIONRESULT_KMAPESTIMATIONQUASIIDVALUES = _descriptor.Descriptor( - name="KMapEstimationQuasiIdValues", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="quasi_ids_values", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues.quasi_ids_values", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="estimated_anonymity", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues.estimated_anonymity", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=13619, - serialized_end=13733, -) - -_ANALYZEDATASOURCERISKDETAILS_KMAPESTIMATIONRESULT_KMAPESTIMATIONHISTOGRAMBUCKET = _descriptor.Descriptor( - name="KMapEstimationHistogramBucket", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - 
_descriptor.FieldDescriptor( - name="min_anonymity", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket.min_anonymity", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max_anonymity", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket.max_anonymity", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_size", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket.bucket_size", - index=2, - number=5, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_values", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket.bucket_values", - index=3, - number=6, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_value_count", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket.bucket_value_count", - 
index=4, - number=7, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=13736, - serialized_end=13987, -) - -_ANALYZEDATASOURCERISKDETAILS_KMAPESTIMATIONRESULT = _descriptor.Descriptor( - name="KMapEstimationResult", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="k_map_estimation_histogram", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.KMapEstimationResult.k_map_estimation_histogram", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[ - _ANALYZEDATASOURCERISKDETAILS_KMAPESTIMATIONRESULT_KMAPESTIMATIONQUASIIDVALUES, - _ANALYZEDATASOURCERISKDETAILS_KMAPESTIMATIONRESULT_KMAPESTIMATIONHISTOGRAMBUCKET, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=13454, - serialized_end=13987, -) - -_ANALYZEDATASOURCERISKDETAILS_DELTAPRESENCEESTIMATIONRESULT_DELTAPRESENCEESTIMATIONQUASIIDVALUES = _descriptor.Descriptor( - name="DeltaPresenceEstimationQuasiIdValues", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - 
name="quasi_ids_values", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues.quasi_ids_values", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="estimated_probability", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues.estimated_probability", - index=1, - number=2, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=14191, - serialized_end=14316, -) - -_ANALYZEDATASOURCERISKDETAILS_DELTAPRESENCEESTIMATIONRESULT_DELTAPRESENCEESTIMATIONHISTOGRAMBUCKET = _descriptor.Descriptor( - name="DeltaPresenceEstimationHistogramBucket", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="min_probability", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket.min_probability", - index=0, - number=1, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - 
file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max_probability", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket.max_probability", - index=1, - number=2, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_size", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket.bucket_size", - index=2, - number=5, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_values", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket.bucket_values", - index=3, - number=6, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_value_count", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket.bucket_value_count", - index=4, - number=7, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, 
- is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=14319, - serialized_end=14602, -) - -_ANALYZEDATASOURCERISKDETAILS_DELTAPRESENCEESTIMATIONRESULT = _descriptor.Descriptor( - name="DeltaPresenceEstimationResult", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="delta_presence_estimation_histogram", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.delta_presence_estimation_histogram", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[ - _ANALYZEDATASOURCERISKDETAILS_DELTAPRESENCEESTIMATIONRESULT_DELTAPRESENCEESTIMATIONQUASIIDVALUES, - _ANALYZEDATASOURCERISKDETAILS_DELTAPRESENCEESTIMATIONRESULT_DELTAPRESENCEESTIMATIONHISTOGRAMBUCKET, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=13990, - serialized_end=14602, -) - -_ANALYZEDATASOURCERISKDETAILS = _descriptor.Descriptor( - name="AnalyzeDataSourceRiskDetails", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="requested_privacy_metric", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.requested_privacy_metric", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - 
_descriptor.FieldDescriptor( - name="requested_source_table", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.requested_source_table", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="numerical_stats_result", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.numerical_stats_result", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="categorical_stats_result", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.categorical_stats_result", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="k_anonymity_result", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.k_anonymity_result", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="l_diversity_result", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.l_diversity_result", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, 
- is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="k_map_estimation_result", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.k_map_estimation_result", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="delta_presence_estimation_result", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.delta_presence_estimation_result", - index=7, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _ANALYZEDATASOURCERISKDETAILS_NUMERICALSTATSRESULT, - _ANALYZEDATASOURCERISKDETAILS_CATEGORICALSTATSRESULT, - _ANALYZEDATASOURCERISKDETAILS_KANONYMITYRESULT, - _ANALYZEDATASOURCERISKDETAILS_LDIVERSITYRESULT, - _ANALYZEDATASOURCERISKDETAILS_KMAPESTIMATIONRESULT, - _ANALYZEDATASOURCERISKDETAILS_DELTAPRESENCEESTIMATIONRESULT, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="result", - full_name="google.privacy.dlp.v2.AnalyzeDataSourceRiskDetails.result", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=10786, - serialized_end=14612, -) - - -_VALUEFREQUENCY = _descriptor.Descriptor( - name="ValueFrequency", - full_name="google.privacy.dlp.v2.ValueFrequency", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="value", - full_name="google.privacy.dlp.v2.ValueFrequency.value", - index=0, - 
number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="count", - full_name="google.privacy.dlp.v2.ValueFrequency.count", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=14614, - serialized_end=14690, -) - - -_VALUE = _descriptor.Descriptor( - name="Value", - full_name="google.privacy.dlp.v2.Value", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="integer_value", - full_name="google.privacy.dlp.v2.Value.integer_value", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="float_value", - full_name="google.privacy.dlp.v2.Value.float_value", - index=1, - number=2, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="string_value", - full_name="google.privacy.dlp.v2.Value.string_value", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="boolean_value", - full_name="google.privacy.dlp.v2.Value.boolean_value", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="timestamp_value", - full_name="google.privacy.dlp.v2.Value.timestamp_value", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time_value", - full_name="google.privacy.dlp.v2.Value.time_value", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="date_value", - full_name="google.privacy.dlp.v2.Value.date_value", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="day_of_week_value", - full_name="google.privacy.dlp.v2.Value.day_of_week_value", - index=7, - number=8, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="google.privacy.dlp.v2.Value.type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=14693, - serialized_end=15000, -) - - -_QUOTEINFO = _descriptor.Descriptor( - name="QuoteInfo", - full_name="google.privacy.dlp.v2.QuoteInfo", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="date_time", - full_name="google.privacy.dlp.v2.QuoteInfo.date_time", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="parsed_quote", - full_name="google.privacy.dlp.v2.QuoteInfo.parsed_quote", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=15002, - serialized_end=15083, -) - - -_DATETIME_TIMEZONE = _descriptor.Descriptor( - name="TimeZone", - full_name="google.privacy.dlp.v2.DateTime.TimeZone", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="offset_minutes", - full_name="google.privacy.dlp.v2.DateTime.TimeZone.offset_minutes", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - 
nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=15275, - serialized_end=15309, -) - -_DATETIME = _descriptor.Descriptor( - name="DateTime", - full_name="google.privacy.dlp.v2.DateTime", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="date", - full_name="google.privacy.dlp.v2.DateTime.date", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="day_of_week", - full_name="google.privacy.dlp.v2.DateTime.day_of_week", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time", - full_name="google.privacy.dlp.v2.DateTime.time", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time_zone", - full_name="google.privacy.dlp.v2.DateTime.time_zone", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_DATETIME_TIMEZONE], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - 
extension_ranges=[], - oneofs=[], - serialized_start=15086, - serialized_end=15309, -) - - -_DEIDENTIFYCONFIG = _descriptor.Descriptor( - name="DeidentifyConfig", - full_name="google.privacy.dlp.v2.DeidentifyConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="info_type_transformations", - full_name="google.privacy.dlp.v2.DeidentifyConfig.info_type_transformations", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="record_transformations", - full_name="google.privacy.dlp.v2.DeidentifyConfig.record_transformations", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="transformation_error_handling", - full_name="google.privacy.dlp.v2.DeidentifyConfig.transformation_error_handling", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="transformation", - full_name="google.privacy.dlp.v2.DeidentifyConfig.transformation", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=15312, - serialized_end=15604, -) - - -_TRANSFORMATIONERRORHANDLING_THROWERROR = 
_descriptor.Descriptor( - name="ThrowError", - full_name="google.privacy.dlp.v2.TransformationErrorHandling.ThrowError", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=15826, - serialized_end=15838, -) - -_TRANSFORMATIONERRORHANDLING_LEAVEUNTRANSFORMED = _descriptor.Descriptor( - name="LeaveUntransformed", - full_name="google.privacy.dlp.v2.TransformationErrorHandling.LeaveUntransformed", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=15840, - serialized_end=15860, -) - -_TRANSFORMATIONERRORHANDLING = _descriptor.Descriptor( - name="TransformationErrorHandling", - full_name="google.privacy.dlp.v2.TransformationErrorHandling", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="throw_error", - full_name="google.privacy.dlp.v2.TransformationErrorHandling.throw_error", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="leave_untransformed", - full_name="google.privacy.dlp.v2.TransformationErrorHandling.leave_untransformed", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - 
_TRANSFORMATIONERRORHANDLING_THROWERROR, - _TRANSFORMATIONERRORHANDLING_LEAVEUNTRANSFORMED, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="mode", - full_name="google.privacy.dlp.v2.TransformationErrorHandling.mode", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=15607, - serialized_end=15868, -) - - -_PRIMITIVETRANSFORMATION = _descriptor.Descriptor( - name="PrimitiveTransformation", - full_name="google.privacy.dlp.v2.PrimitiveTransformation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="replace_config", - full_name="google.privacy.dlp.v2.PrimitiveTransformation.replace_config", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="redact_config", - full_name="google.privacy.dlp.v2.PrimitiveTransformation.redact_config", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="character_mask_config", - full_name="google.privacy.dlp.v2.PrimitiveTransformation.character_mask_config", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="crypto_replace_ffx_fpe_config", - 
full_name="google.privacy.dlp.v2.PrimitiveTransformation.crypto_replace_ffx_fpe_config", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="fixed_size_bucketing_config", - full_name="google.privacy.dlp.v2.PrimitiveTransformation.fixed_size_bucketing_config", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucketing_config", - full_name="google.privacy.dlp.v2.PrimitiveTransformation.bucketing_config", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="replace_with_info_type_config", - full_name="google.privacy.dlp.v2.PrimitiveTransformation.replace_with_info_type_config", - index=6, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="time_part_config", - full_name="google.privacy.dlp.v2.PrimitiveTransformation.time_part_config", - index=7, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="crypto_hash_config", - full_name="google.privacy.dlp.v2.PrimitiveTransformation.crypto_hash_config", - index=8, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="date_shift_config", - full_name="google.privacy.dlp.v2.PrimitiveTransformation.date_shift_config", - index=9, - number=11, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="crypto_deterministic_config", - full_name="google.privacy.dlp.v2.PrimitiveTransformation.crypto_deterministic_config", - index=10, - number=12, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="transformation", - full_name="google.privacy.dlp.v2.PrimitiveTransformation.transformation", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=15871, - serialized_end=16756, -) - - -_TIMEPARTCONFIG = _descriptor.Descriptor( - name="TimePartConfig", - full_name="google.privacy.dlp.v2.TimePartConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="part_to_extract", - 
full_name="google.privacy.dlp.v2.TimePartConfig.part_to_extract", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[_TIMEPARTCONFIG_TIMEPART], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=16759, - serialized_end=16979, -) - - -_CRYPTOHASHCONFIG = _descriptor.Descriptor( - name="CryptoHashConfig", - full_name="google.privacy.dlp.v2.CryptoHashConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="crypto_key", - full_name="google.privacy.dlp.v2.CryptoHashConfig.crypto_key", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=16981, - serialized_end=17053, -) - - -_CRYPTODETERMINISTICCONFIG = _descriptor.Descriptor( - name="CryptoDeterministicConfig", - full_name="google.privacy.dlp.v2.CryptoDeterministicConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="crypto_key", - full_name="google.privacy.dlp.v2.CryptoDeterministicConfig.crypto_key", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - 
_descriptor.FieldDescriptor( - name="surrogate_info_type", - full_name="google.privacy.dlp.v2.CryptoDeterministicConfig.surrogate_info_type", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="context", - full_name="google.privacy.dlp.v2.CryptoDeterministicConfig.context", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=17056, - serialized_end=17248, -) - - -_REPLACEVALUECONFIG = _descriptor.Descriptor( - name="ReplaceValueConfig", - full_name="google.privacy.dlp.v2.ReplaceValueConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="new_value", - full_name="google.privacy.dlp.v2.ReplaceValueConfig.new_value", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=17250, - serialized_end=17319, -) - - -_REPLACEWITHINFOTYPECONFIG = _descriptor.Descriptor( - name="ReplaceWithInfoTypeConfig", - full_name="google.privacy.dlp.v2.ReplaceWithInfoTypeConfig", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=17321, - serialized_end=17348, -) - - -_REDACTCONFIG = _descriptor.Descriptor( - name="RedactConfig", - full_name="google.privacy.dlp.v2.RedactConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=17350, - serialized_end=17364, -) - - -_CHARSTOIGNORE = _descriptor.Descriptor( - name="CharsToIgnore", - full_name="google.privacy.dlp.v2.CharsToIgnore", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="characters_to_skip", - full_name="google.privacy.dlp.v2.CharsToIgnore.characters_to_skip", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="common_characters_to_ignore", - full_name="google.privacy.dlp.v2.CharsToIgnore.common_characters_to_ignore", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_CHARSTOIGNORE_COMMONCHARSTOIGNORE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="characters", - full_name="google.privacy.dlp.v2.CharsToIgnore.characters", - 
index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=17367, - serialized_end=17677, -) - - -_CHARACTERMASKCONFIG = _descriptor.Descriptor( - name="CharacterMaskConfig", - full_name="google.privacy.dlp.v2.CharacterMaskConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="masking_character", - full_name="google.privacy.dlp.v2.CharacterMaskConfig.masking_character", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="number_to_mask", - full_name="google.privacy.dlp.v2.CharacterMaskConfig.number_to_mask", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="reverse_order", - full_name="google.privacy.dlp.v2.CharacterMaskConfig.reverse_order", - index=2, - number=3, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="characters_to_ignore", - full_name="google.privacy.dlp.v2.CharacterMaskConfig.characters_to_ignore", - index=3, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - 
serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=17680, - serialized_end=17843, -) - - -_FIXEDSIZEBUCKETINGCONFIG = _descriptor.Descriptor( - name="FixedSizeBucketingConfig", - full_name="google.privacy.dlp.v2.FixedSizeBucketingConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="lower_bound", - full_name="google.privacy.dlp.v2.FixedSizeBucketingConfig.lower_bound", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="upper_bound", - full_name="google.privacy.dlp.v2.FixedSizeBucketingConfig.upper_bound", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bucket_size", - full_name="google.privacy.dlp.v2.FixedSizeBucketingConfig.bucket_size", - index=2, - number=3, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=17846, - serialized_end=18010, -) - - -_BUCKETINGCONFIG_BUCKET = _descriptor.Descriptor( - name="Bucket", - full_name="google.privacy.dlp.v2.BucketingConfig.Bucket", - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="min", - full_name="google.privacy.dlp.v2.BucketingConfig.Bucket.min", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="max", - full_name="google.privacy.dlp.v2.BucketingConfig.Bucket.max", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="replacement_value", - full_name="google.privacy.dlp.v2.BucketingConfig.Bucket.replacement_value", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=18097, - serialized_end=18248, -) - -_BUCKETINGCONFIG = _descriptor.Descriptor( - name="BucketingConfig", - full_name="google.privacy.dlp.v2.BucketingConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="buckets", - full_name="google.privacy.dlp.v2.BucketingConfig.buckets", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - 
extensions=[], - nested_types=[_BUCKETINGCONFIG_BUCKET], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=18013, - serialized_end=18248, -) - - -_CRYPTOREPLACEFFXFPECONFIG = _descriptor.Descriptor( - name="CryptoReplaceFfxFpeConfig", - full_name="google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="crypto_key", - full_name="google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig.crypto_key", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="context", - full_name="google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig.context", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="common_alphabet", - full_name="google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig.common_alphabet", - index=2, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="custom_alphabet", - full_name="google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig.custom_alphabet", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="radix", - full_name="google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig.radix", - index=4, - number=6, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="surrogate_info_type", - full_name="google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig.surrogate_info_type", - index=5, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_CRYPTOREPLACEFFXFPECONFIG_FFXCOMMONNATIVEALPHABET], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="alphabet", - full_name="google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig.alphabet", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=18251, - serialized_end=18756, -) - - -_CRYPTOKEY = _descriptor.Descriptor( - name="CryptoKey", - full_name="google.privacy.dlp.v2.CryptoKey", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="transient", - full_name="google.privacy.dlp.v2.CryptoKey.transient", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="unwrapped", - full_name="google.privacy.dlp.v2.CryptoKey.unwrapped", - index=1, - number=2, - 
type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="kms_wrapped", - full_name="google.privacy.dlp.v2.CryptoKey.kms_wrapped", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="source", - full_name="google.privacy.dlp.v2.CryptoKey.source", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=18759, - serialized_end=18975, -) - - -_TRANSIENTCRYPTOKEY = _descriptor.Descriptor( - name="TransientCryptoKey", - full_name="google.privacy.dlp.v2.TransientCryptoKey", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.privacy.dlp.v2.TransientCryptoKey.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=18977, - serialized_end=19016, -) - - -_UNWRAPPEDCRYPTOKEY = _descriptor.Descriptor( - name="UnwrappedCryptoKey", - full_name="google.privacy.dlp.v2.UnwrappedCryptoKey", - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.privacy.dlp.v2.UnwrappedCryptoKey.key", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=19018, - serialized_end=19056, -) - - -_KMSWRAPPEDCRYPTOKEY = _descriptor.Descriptor( - name="KmsWrappedCryptoKey", - full_name="google.privacy.dlp.v2.KmsWrappedCryptoKey", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="wrapped_key", - full_name="google.privacy.dlp.v2.KmsWrappedCryptoKey.wrapped_key", - index=0, - number=1, - type=12, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"", - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="crypto_key_name", - full_name="google.privacy.dlp.v2.KmsWrappedCryptoKey.crypto_key_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=19058, - serialized_end=19135, -) - - -_DATESHIFTCONFIG = _descriptor.Descriptor( - name="DateShiftConfig", - full_name="google.privacy.dlp.v2.DateShiftConfig", - 
filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="upper_bound_days", - full_name="google.privacy.dlp.v2.DateShiftConfig.upper_bound_days", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="lower_bound_days", - full_name="google.privacy.dlp.v2.DateShiftConfig.lower_bound_days", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="context", - full_name="google.privacy.dlp.v2.DateShiftConfig.context", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="crypto_key", - full_name="google.privacy.dlp.v2.DateShiftConfig.crypto_key", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="method", - full_name="google.privacy.dlp.v2.DateShiftConfig.method", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=19138, - 
serialized_end=19332, -) - - -_INFOTYPETRANSFORMATIONS_INFOTYPETRANSFORMATION = _descriptor.Descriptor( - name="InfoTypeTransformation", - full_name="google.privacy.dlp.v2.InfoTypeTransformations.InfoTypeTransformation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="info_types", - full_name="google.privacy.dlp.v2.InfoTypeTransformations.InfoTypeTransformation.info_types", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="primitive_transformation", - full_name="google.privacy.dlp.v2.InfoTypeTransformations.InfoTypeTransformation.primitive_transformation", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=19464, - serialized_end=19628, -) - -_INFOTYPETRANSFORMATIONS = _descriptor.Descriptor( - name="InfoTypeTransformations", - full_name="google.privacy.dlp.v2.InfoTypeTransformations", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="transformations", - full_name="google.privacy.dlp.v2.InfoTypeTransformations.transformations", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - 
) - ], - extensions=[], - nested_types=[_INFOTYPETRANSFORMATIONS_INFOTYPETRANSFORMATION], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=19335, - serialized_end=19628, -) - - -_FIELDTRANSFORMATION = _descriptor.Descriptor( - name="FieldTransformation", - full_name="google.privacy.dlp.v2.FieldTransformation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="fields", - full_name="google.privacy.dlp.v2.FieldTransformation.fields", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="condition", - full_name="google.privacy.dlp.v2.FieldTransformation.condition", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="primitive_transformation", - full_name="google.privacy.dlp.v2.FieldTransformation.primitive_transformation", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="info_type_transformations", - full_name="google.privacy.dlp.v2.FieldTransformation.info_type_transformations", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - 
is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="transformation", - full_name="google.privacy.dlp.v2.FieldTransformation.transformation", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=19631, - serialized_end=19951, -) - - -_RECORDTRANSFORMATIONS = _descriptor.Descriptor( - name="RecordTransformations", - full_name="google.privacy.dlp.v2.RecordTransformations", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="field_transformations", - full_name="google.privacy.dlp.v2.RecordTransformations.field_transformations", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="record_suppressions", - full_name="google.privacy.dlp.v2.RecordTransformations.record_suppressions", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=19954, - serialized_end=20123, -) - - -_RECORDSUPPRESSION = _descriptor.Descriptor( - name="RecordSuppression", - full_name="google.privacy.dlp.v2.RecordSuppression", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="condition", 
- full_name="google.privacy.dlp.v2.RecordSuppression.condition", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=20125, - serialized_end=20203, -) - - -_RECORDCONDITION_CONDITION = _descriptor.Descriptor( - name="Condition", - full_name="google.privacy.dlp.v2.RecordCondition.Condition", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="field", - full_name="google.privacy.dlp.v2.RecordCondition.Condition.field", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="operator", - full_name="google.privacy.dlp.v2.RecordCondition.Condition.operator", - index=1, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.privacy.dlp.v2.RecordCondition.Condition.value", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - 
serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=20299, - serialized_end=20473, -) - -_RECORDCONDITION_CONDITIONS = _descriptor.Descriptor( - name="Conditions", - full_name="google.privacy.dlp.v2.RecordCondition.Conditions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="conditions", - full_name="google.privacy.dlp.v2.RecordCondition.Conditions.conditions", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=20475, - serialized_end=20557, -) - -_RECORDCONDITION_EXPRESSIONS = _descriptor.Descriptor( - name="Expressions", - full_name="google.privacy.dlp.v2.RecordCondition.Expressions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="logical_operator", - full_name="google.privacy.dlp.v2.RecordCondition.Expressions.logical_operator", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="conditions", - full_name="google.privacy.dlp.v2.RecordCondition.Expressions.conditions", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - 
], - extensions=[], - nested_types=[], - enum_types=[_RECORDCONDITION_EXPRESSIONS_LOGICALOPERATOR], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="google.privacy.dlp.v2.RecordCondition.Expressions.type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=20560, - serialized_end=20810, -) - -_RECORDCONDITION = _descriptor.Descriptor( - name="RecordCondition", - full_name="google.privacy.dlp.v2.RecordCondition", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="expressions", - full_name="google.privacy.dlp.v2.RecordCondition.expressions", - index=0, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[ - _RECORDCONDITION_CONDITION, - _RECORDCONDITION_CONDITIONS, - _RECORDCONDITION_EXPRESSIONS, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=20206, - serialized_end=20810, -) - - -_TRANSFORMATIONOVERVIEW = _descriptor.Descriptor( - name="TransformationOverview", - full_name="google.privacy.dlp.v2.TransformationOverview", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="transformed_bytes", - full_name="google.privacy.dlp.v2.TransformationOverview.transformed_bytes", - index=0, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - 
name="transformation_summaries", - full_name="google.privacy.dlp.v2.TransformationOverview.transformation_summaries", - index=1, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=20813, - serialized_end=20944, -) - - -_TRANSFORMATIONSUMMARY_SUMMARYRESULT = _descriptor.Descriptor( - name="SummaryResult", - full_name="google.privacy.dlp.v2.TransformationSummary.SummaryResult", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="count", - full_name="google.privacy.dlp.v2.TransformationSummary.SummaryResult.count", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="code", - full_name="google.privacy.dlp.v2.TransformationSummary.SummaryResult.code", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="details", - full_name="google.privacy.dlp.v2.TransformationSummary.SummaryResult.details", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=21390, - serialized_end=21522, -) - -_TRANSFORMATIONSUMMARY = _descriptor.Descriptor( - name="TransformationSummary", - full_name="google.privacy.dlp.v2.TransformationSummary", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="info_type", - full_name="google.privacy.dlp.v2.TransformationSummary.info_type", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="field", - full_name="google.privacy.dlp.v2.TransformationSummary.field", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="transformation", - full_name="google.privacy.dlp.v2.TransformationSummary.transformation", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="field_transformations", - full_name="google.privacy.dlp.v2.TransformationSummary.field_transformations", - index=3, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="record_suppress", - full_name="google.privacy.dlp.v2.TransformationSummary.record_suppress", - index=4, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="results", - full_name="google.privacy.dlp.v2.TransformationSummary.results", - index=5, - number=4, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="transformed_bytes", - full_name="google.privacy.dlp.v2.TransformationSummary.transformed_bytes", - index=6, - number=7, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_TRANSFORMATIONSUMMARY_SUMMARYRESULT], - enum_types=[_TRANSFORMATIONSUMMARY_TRANSFORMATIONRESULTCODE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=20947, - serialized_end=21618, -) - - -_SCHEDULE = _descriptor.Descriptor( - name="Schedule", - full_name="google.privacy.dlp.v2.Schedule", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="recurrence_period_duration", - full_name="google.privacy.dlp.v2.Schedule.recurrence_period_duration", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - 
message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="option", - full_name="google.privacy.dlp.v2.Schedule.option", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=21620, - serialized_end=21705, -) - - -_MANUAL = _descriptor.Descriptor( - name="Manual", - full_name="google.privacy.dlp.v2.Manual", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=21707, - serialized_end=21715, -) - - -_INSPECTTEMPLATE = _descriptor.Descriptor( - name="InspectTemplate", - full_name="google.privacy.dlp.v2.InspectTemplate", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.privacy.dlp.v2.InspectTemplate.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.privacy.dlp.v2.InspectTemplate.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - 
full_name="google.privacy.dlp.v2.InspectTemplate.description", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.privacy.dlp.v2.InspectTemplate.create_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - full_name="google.privacy.dlp.v2.InspectTemplate.update_time", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="inspect_config", - full_name="google.privacy.dlp.v2.InspectTemplate.inspect_config", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b'\352A\302\002\n"dlp.googleapis.com/InspectTemplate\022@organizations/{organization}/inspectTemplates/{inspect_template}\0226projects/{project}/inspectTemplates/{inspect_template}\022Uorganizations/{organization}/locations/{location}/inspectTemplates/{inspect_template}\022Kprojects/{project}/locations/{location}/inspectTemplates/{inspect_template}', - is_extendable=False, - 
syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=21718, - serialized_end=22296, -) - - -_DEIDENTIFYTEMPLATE = _descriptor.Descriptor( - name="DeidentifyTemplate", - full_name="google.privacy.dlp.v2.DeidentifyTemplate", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.privacy.dlp.v2.DeidentifyTemplate.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.privacy.dlp.v2.DeidentifyTemplate.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="description", - full_name="google.privacy.dlp.v2.DeidentifyTemplate.description", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.privacy.dlp.v2.DeidentifyTemplate.create_time", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="update_time", - 
full_name="google.privacy.dlp.v2.DeidentifyTemplate.update_time", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\003", - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="deidentify_config", - full_name="google.privacy.dlp.v2.DeidentifyTemplate.deidentify_config", - index=5, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"\352A\335\002\n%dlp.googleapis.com/DeidentifyTemplate\022Forganizations/{organization}/deidentifyTemplates/{deidentify_template}\022 LOW 31-65 -> MEDIUM 66-100 -> HIGH - This can be used on data of type: number, long, string, timestamp. If - the bound ``Value`` type differs from the type of data being - transformed, we will first attempt converting the type of the data to - be transformed to match the type of the bound before comparing. See - https://cloud.google.com/dlp/docs/concepts-bucketing to learn more. - Attributes: - buckets: - Set of buckets. Ranges must be non-overlapping. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.BucketingConfig) - }, -) -_sym_db.RegisterMessage(BucketingConfig) -_sym_db.RegisterMessage(BucketingConfig.Bucket) - -CryptoReplaceFfxFpeConfig = _reflection.GeneratedProtocolMessageType( - "CryptoReplaceFfxFpeConfig", - (_message.Message,), - { - "DESCRIPTOR": _CRYPTOREPLACEFFXFPECONFIG, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Replaces an identifier with a surrogate using Format Preserving - Encryption (FPE) with the FFX mode of operation; however when used in - the ``ReidentifyContent`` API method, it serves the opposite function - by reversing the surrogate back into the original identifier. The - identifier must be encoded as ASCII. For a given crypto key and - context, the same identifier will be replaced with the same surrogate. - Identifiers must be at least two characters long. In the case that the - identifier is the empty string, it will be skipped. See - https://cloud.google.com/dlp/docs/pseudonymization to learn more. - Note: We recommend using CryptoDeterministicConfig for all use cases - which do not require preserving the input alphabet space and size, - plus warrant referential integrity. - Attributes: - crypto_key: - Required. The key used by the encryption algorithm. - context: - The ‘tweak’, a context may be used for higher security since - the same identifier in two different contexts won’t be given - the same surrogate. If the context is not set, a default tweak - will be used. If the context is set but: 1. there is no - record present when transforming a given value or 2. the field - is not present when transforming a given value, a default - tweak will be used. Note that case (1) is expected when an - ``InfoTypeTransformation`` is applied to both structured and - non-structured ``ContentItem``\ s. Currently, the referenced - field may be of value type integer or string. 
The tweak is - constructed as a sequence of bytes in big endian byte order - such that: - a 64 bit integer is encoded followed by a - single byte of value 1 - a string is encoded in UTF-8 format - followed by a single byte of value 2 - alphabet: - Choose an alphabet which the data being transformed will be - made up of. - common_alphabet: - Common alphabets. - custom_alphabet: - This is supported by mapping these to the alphanumeric - characters that the FFX mode natively supports. This happens - before/after encryption/decryption. Each character listed must - appear only once. Number of characters must be in the range - [2, 95]. This must be encoded as ASCII. The order of - characters does not matter. - radix: - The native way to select the alphabet. Must be in the range - [2, 95]. - surrogate_info_type: - The custom infoType to annotate the surrogate with. This - annotation will be applied to the surrogate by prefixing it - with the name of the custom infoType followed by the number of - characters comprising the surrogate. The following scheme - defines the format: - info_type_name(surrogate_character_count):surrogate For - example, if the name of custom infoType is - ‘MY_TOKEN_INFO_TYPE’ and the surrogate is ‘abc’, the full - replacement value will be: ‘MY_TOKEN_INFO_TYPE(3):abc’ This - annotation identifies the surrogate when inspecting content - using the custom infoType ```SurrogateType`` - `__. - This facilitates reversal of the surrogate when it occurs in - free text. In order for inspection to work properly, the name - of this infoType must not occur naturally anywhere in your - data; otherwise, inspection may find a surrogate that does not - correspond to an actual identifier. Therefore, choose your - custom infoType name carefully after considering what your - data looks like. One way to select a name that has a high - chance of yielding reliable detection is to include one or - more unicode characters that are highly improbable to exist in - your data. 
For example, assuming your data is entered from a - regular ASCII keyboard, the symbol with the hex code point - 29DD might be used like so: ⧝MY_TOKEN_TYPE - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CryptoReplaceFfxFpeConfig) - }, -) -_sym_db.RegisterMessage(CryptoReplaceFfxFpeConfig) - -CryptoKey = _reflection.GeneratedProtocolMessageType( - "CryptoKey", - (_message.Message,), - { - "DESCRIPTOR": _CRYPTOKEY, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """This is a data encryption key (DEK) (as opposed to a key encryption - key (KEK) stored by KMS). When using KMS to wrap/unwrap DEKs, be sure - to set an appropriate IAM policy on the KMS CryptoKey (KEK) to ensure - an attacker cannot unwrap the data crypto key. - Attributes: - source: - Sources of crypto keys. - transient: - Transient crypto key - unwrapped: - Unwrapped crypto key - kms_wrapped: - Kms wrapped key - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CryptoKey) - }, -) -_sym_db.RegisterMessage(CryptoKey) - -TransientCryptoKey = _reflection.GeneratedProtocolMessageType( - "TransientCryptoKey", - (_message.Message,), - { - "DESCRIPTOR": _TRANSIENTCRYPTOKEY, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Use this to have a random data crypto key generated. It will be - discarded after the request finishes. - Attributes: - name: - Required. Name of the key. This is an arbitrary string used to - differentiate different keys. A unique key is generated per - name: two separate ``TransientCryptoKey`` protos share the - same generated key if their names are the same. When the data - crypto key is generated, this name is not used in any way - (repeating the api call will result in a different key being - generated). 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.TransientCryptoKey) - }, -) -_sym_db.RegisterMessage(TransientCryptoKey) - -UnwrappedCryptoKey = _reflection.GeneratedProtocolMessageType( - "UnwrappedCryptoKey", - (_message.Message,), - { - "DESCRIPTOR": _UNWRAPPEDCRYPTOKEY, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Using raw keys is prone to security risks due to accidentally leaking - the key. Choose another type of key if possible. - Attributes: - key: - Required. A 128/192/256 bit key. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.UnwrappedCryptoKey) - }, -) -_sym_db.RegisterMessage(UnwrappedCryptoKey) - -KmsWrappedCryptoKey = _reflection.GeneratedProtocolMessageType( - "KmsWrappedCryptoKey", - (_message.Message,), - { - "DESCRIPTOR": _KMSWRAPPEDCRYPTOKEY, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Include to use an existing data crypto key wrapped by KMS. The wrapped - key must be a 128/192/256 bit key. Authorization requires the - following IAM permissions when sending a request to perform a crypto - transformation using a kms-wrapped crypto key: dlp.kms.encrypt - Attributes: - wrapped_key: - Required. The wrapped data crypto key. - crypto_key_name: - Required. The resource name of the KMS CryptoKey to use for - unwrapping. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.KmsWrappedCryptoKey) - }, -) -_sym_db.RegisterMessage(KmsWrappedCryptoKey) - -DateShiftConfig = _reflection.GeneratedProtocolMessageType( - "DateShiftConfig", - (_message.Message,), - { - "DESCRIPTOR": _DATESHIFTCONFIG, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Shifts dates by random number of days, with option to be consistent - for the same context. See https://cloud.google.com/dlp/docs/concepts- - date-shifting to learn more. - Attributes: - upper_bound_days: - Required. Range of shift in days. 
Actual shift will be - selected at random within this range (inclusive ends). - Negative means shift to earlier in time. Must not be more than - 365250 days (1000 years) each direction. For example, 3 means - shift date to at most 3 days into the future. - lower_bound_days: - Required. For example, -5 means shift date to at most 5 days - back in the past. - context: - Points to the field that contains the context, for example, an - entity id. If set, must also set cryptoKey. If set, shift will - be consistent for the given context. - method: - Method for calculating shift that takes context into - consideration. If set, must also set context. Can only be - applied to table items. - crypto_key: - Causes the shift to be computed based on this key and the - context. This results in the same shift for the same context - and crypto_key. If set, must also set context. Can only be - applied to table items. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DateShiftConfig) - }, -) -_sym_db.RegisterMessage(DateShiftConfig) - -InfoTypeTransformations = _reflection.GeneratedProtocolMessageType( - "InfoTypeTransformations", - (_message.Message,), - { - "InfoTypeTransformation": _reflection.GeneratedProtocolMessageType( - "InfoTypeTransformation", - (_message.Message,), - { - "DESCRIPTOR": _INFOTYPETRANSFORMATIONS_INFOTYPETRANSFORMATION, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """A transformation to apply to text that is identified as a specific - info_type. - Attributes: - info_types: - InfoTypes to apply the transformation to. An empty list will - cause this transformation to apply to all findings that - correspond to infoTypes that were requested in - ``InspectConfig``. - primitive_transformation: - Required. Primitive transformation to apply to the infoType. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.InfoTypeTransformations.InfoTypeTransformation) - }, - ), - "DESCRIPTOR": _INFOTYPETRANSFORMATIONS, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """A type of transformation that will scan unstructured text and apply - various ``PrimitiveTransformation``\ s to each finding, where the - transformation is applied to only values that were identified as a - specific info_type. - Attributes: - transformations: - Required. Transformation for each infoType. Cannot specify - more than one for a given infoType. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.InfoTypeTransformations) - }, -) -_sym_db.RegisterMessage(InfoTypeTransformations) -_sym_db.RegisterMessage(InfoTypeTransformations.InfoTypeTransformation) - -FieldTransformation = _reflection.GeneratedProtocolMessageType( - "FieldTransformation", - (_message.Message,), - { - "DESCRIPTOR": _FIELDTRANSFORMATION, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """The transformation to apply to the field. - Attributes: - fields: - Required. Input field(s) to apply the transformation to. - condition: - Only apply the transformation if the condition evaluates to - true for the given ``RecordCondition``. The conditions are - allowed to reference fields that are not used in the actual - transformation. Example Use Cases: - Apply a different - bucket transformation to an age column if the zip code - column for the same record is within a specific range. - - Redact a field if the date of birth field is greater than 85. - transformation: - Transformation to apply. [required] - primitive_transformation: - Apply the transformation to the entire field. - info_type_transformations: - Treat the contents of the field as free text, and selectively - transform content that matches an ``InfoType``. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.FieldTransformation) - }, -) -_sym_db.RegisterMessage(FieldTransformation) - -RecordTransformations = _reflection.GeneratedProtocolMessageType( - "RecordTransformations", - (_message.Message,), - { - "DESCRIPTOR": _RECORDTRANSFORMATIONS, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """A type of transformation that is applied over structured data such as - a table. - Attributes: - field_transformations: - Transform the record by applying various field - transformations. - record_suppressions: - Configuration defining which records get suppressed entirely. - Records that match any suppression rule are omitted from the - output. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.RecordTransformations) - }, -) -_sym_db.RegisterMessage(RecordTransformations) - -RecordSuppression = _reflection.GeneratedProtocolMessageType( - "RecordSuppression", - (_message.Message,), - { - "DESCRIPTOR": _RECORDSUPPRESSION, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Configuration to suppress records whose suppression conditions - evaluate to true. - Attributes: - condition: - A condition that when it evaluates to true will result in the - record being evaluated to be suppressed from the transformed - content. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.RecordSuppression) - }, -) -_sym_db.RegisterMessage(RecordSuppression) - -RecordCondition = _reflection.GeneratedProtocolMessageType( - "RecordCondition", - (_message.Message,), - { - "Condition": _reflection.GeneratedProtocolMessageType( - "Condition", - (_message.Message,), - { - "DESCRIPTOR": _RECORDCONDITION_CONDITION, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2" - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.RecordCondition.Condition) - }, - ), - "Conditions": _reflection.GeneratedProtocolMessageType( - "Conditions", - (_message.Message,), - { - "DESCRIPTOR": _RECORDCONDITION_CONDITIONS, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """A collection of conditions. - Attributes: - conditions: - A collection of conditions. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.RecordCondition.Conditions) - }, - ), - "Expressions": _reflection.GeneratedProtocolMessageType( - "Expressions", - (_message.Message,), - { - "DESCRIPTOR": _RECORDCONDITION_EXPRESSIONS, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """An expression, consisting or an operator and conditions. - Attributes: - logical_operator: - The operator to apply to the result of conditions. Default and - currently only supported value is ``AND``. - type: - Expression types. - conditions: - Conditions to apply to the expression. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.RecordCondition.Expressions) - }, - ), - "DESCRIPTOR": _RECORDCONDITION, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """The field type of ``value`` and ``field`` do not need to match to be - considered equal, but not all comparisons are possible. EQUAL_TO and - NOT_EQUAL_TO attempt to compare even with incompatible types, but all - other comparisons are invalid with incompatible types. 
A ``value`` of - type: - ``string`` can be compared against all other types - - ``boolean`` can only be compared against other booleans - ``integer`` - can be compared against doubles or a string if the string value can - be parsed as an integer. - ``double`` can be compared against - integers or a string if the string can be parsed as a double. - - ``Timestamp`` can be compared against strings in RFC 3339 date string - format. - ``TimeOfDay`` can be compared against timestamps and - strings in the format of ‘HH:mm:ss’. If we fail to compare do to - type mismatch, a warning will be given and the condition will evaluate - to false. - Attributes: - field: - Required. Field within the record this condition is evaluated - against. - operator: - Required. Operator used to compare the field or infoType to - the value. - value: - Value to compare against. [Mandatory, except for ``EXISTS`` - tests.] - expressions: - An expression. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.RecordCondition) - }, -) -_sym_db.RegisterMessage(RecordCondition) -_sym_db.RegisterMessage(RecordCondition.Condition) -_sym_db.RegisterMessage(RecordCondition.Conditions) -_sym_db.RegisterMessage(RecordCondition.Expressions) - -TransformationOverview = _reflection.GeneratedProtocolMessageType( - "TransformationOverview", - (_message.Message,), - { - "DESCRIPTOR": _TRANSFORMATIONOVERVIEW, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Overview of the modifications that occurred. - Attributes: - transformed_bytes: - Total size in bytes that were transformed in some way. - transformation_summaries: - Transformations applied to the dataset. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.TransformationOverview) - }, -) -_sym_db.RegisterMessage(TransformationOverview) - -TransformationSummary = _reflection.GeneratedProtocolMessageType( - "TransformationSummary", - (_message.Message,), - { - "SummaryResult": _reflection.GeneratedProtocolMessageType( - "SummaryResult", - (_message.Message,), - { - "DESCRIPTOR": _TRANSFORMATIONSUMMARY_SUMMARYRESULT, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """A collection that informs the user the number of times a particular - ``TransformationResultCode`` and error details occurred. - Attributes: - count: - Number of transformations counted by this result. - code: - Outcome of the transformation. - details: - A place for warnings or errors to show up if a transformation - didn’t work as expected. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.TransformationSummary.SummaryResult) - }, - ), - "DESCRIPTOR": _TRANSFORMATIONSUMMARY, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Summary of a single transformation. Only one of ‘transformation’, - ‘field_transformation’, or ‘record_suppress’ will be set. - Attributes: - info_type: - Set if the transformation was limited to a specific InfoType. - field: - Set if the transformation was limited to a specific FieldId. - transformation: - The specific transformation these stats apply to. - field_transformations: - The field transformation that was applied. If multiple field - transformations are requested for a single field, this list - will contain all of them; otherwise, only one is supplied. - record_suppress: - The specific suppression option these stats apply to. - results: - Collection of all transformations that took place or had an - error. - transformed_bytes: - Total size in bytes that were transformed in some way. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.TransformationSummary) - }, -) -_sym_db.RegisterMessage(TransformationSummary) -_sym_db.RegisterMessage(TransformationSummary.SummaryResult) - -Schedule = _reflection.GeneratedProtocolMessageType( - "Schedule", - (_message.Message,), - { - "DESCRIPTOR": _SCHEDULE, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Schedule for triggeredJobs. - Attributes: - recurrence_period_duration: - With this option a job is started a regular periodic basis. - For example: every day (86400 seconds). A scheduled start - time will be skipped if the previous execution has not ended - when its scheduled time occurs. This value must be set to a - time duration greater than or equal to 1 day and can be no - longer than 60 days. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.Schedule) - }, -) -_sym_db.RegisterMessage(Schedule) - -Manual = _reflection.GeneratedProtocolMessageType( - "Manual", - (_message.Message,), - { - "DESCRIPTOR": _MANUAL, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Job trigger option for hybrid jobs. Jobs must be manually created and - finished.""", - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.Manual) - }, -) -_sym_db.RegisterMessage(Manual) - -InspectTemplate = _reflection.GeneratedProtocolMessageType( - "InspectTemplate", - (_message.Message,), - { - "DESCRIPTOR": _INSPECTTEMPLATE, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """The inspectTemplate contains a configuration (set of types of - sensitive data to be detected) to be used anywhere you otherwise would - normally specify InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates to learn more. - Attributes: - name: - Output only. The template name. 
The template will have one of - the following formats: - ``projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID`` OR ``orga - nizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID``; - display_name: - Display name (max 256 chars). - description: - Short description (max 256 chars). - create_time: - Output only. The creation timestamp of an inspectTemplate. - update_time: - Output only. The last update timestamp of an inspectTemplate. - inspect_config: - The core content of the template. Configuration of the - scanning process. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.InspectTemplate) - }, -) -_sym_db.RegisterMessage(InspectTemplate) - -DeidentifyTemplate = _reflection.GeneratedProtocolMessageType( - "DeidentifyTemplate", - (_message.Message,), - { - "DESCRIPTOR": _DEIDENTIFYTEMPLATE, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """DeidentifyTemplates contains instructions on how to de-identify - content. See https://cloud.google.com/dlp/docs/concepts-templates to - learn more. - Attributes: - name: - Output only. The template name. The template will have one of - the following formats: - ``projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID`` OR ``o - rganizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID`` - display_name: - Display name (max 256 chars). - description: - Short description (max 256 chars). - create_time: - Output only. The creation timestamp of an inspectTemplate. - update_time: - Output only. The last update timestamp of an inspectTemplate. 
- deidentify_config: - ///////////// // The core content of the template // /////////////// - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DeidentifyTemplate) - }, -) -_sym_db.RegisterMessage(DeidentifyTemplate) - -Error = _reflection.GeneratedProtocolMessageType( - "Error", - (_message.Message,), - { - "DESCRIPTOR": _ERROR, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Details information about an error encountered during job execution or - the results of an unsuccessful activation of the JobTrigger. - Attributes: - details: - Detailed error codes and messages. - timestamps: - The times the error occurred. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.Error) - }, -) -_sym_db.RegisterMessage(Error) - -JobTrigger = _reflection.GeneratedProtocolMessageType( - "JobTrigger", - (_message.Message,), - { - "Trigger": _reflection.GeneratedProtocolMessageType( - "Trigger", - (_message.Message,), - { - "DESCRIPTOR": _JOBTRIGGER_TRIGGER, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2" - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.JobTrigger.Trigger) - }, - ), - "DESCRIPTOR": _JOBTRIGGER, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """What event needs to occur for a new job to be started. - Attributes: - schedule: - Create a job on a repeating basis based on the elapse of time. - manual: - For use with hybrid jobs. Jobs must be manually created and - finished. Early access feature is in a pre-release state and - might change or have limited support. For more information, - see https://cloud.google.com/products#product-launch-stages. - name: - Unique resource name for the triggeredJob, assigned by the - service when the triggeredJob is created, for example - ``projects/dlp-test-project/jobTriggers/53234423``. 
- display_name: - Display name (max 100 chars) - description: - User provided description (max 256 chars) - job: - The configuration details for the specific type of job to run. - inspect_job: - For inspect jobs, a snapshot of the configuration. - triggers: - A list of triggers which will be OR’ed together. Only one in - the list needs to trigger for a job to be started. The list - may contain only a single Schedule trigger and must have at - least one object. - errors: - Output only. A stream of errors encountered when the trigger - was activated. Repeated errors may result in the JobTrigger - automatically being paused. Will return the last 100 errors. - Whenever the JobTrigger is modified this list will be cleared. - create_time: - Output only. The creation timestamp of a triggeredJob. - update_time: - Output only. The last update timestamp of a triggeredJob. - last_run_time: - Output only. The timestamp of the last time this trigger - executed. - status: - Required. A status for this trigger. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.JobTrigger) - }, -) -_sym_db.RegisterMessage(JobTrigger) -_sym_db.RegisterMessage(JobTrigger.Trigger) - -Action = _reflection.GeneratedProtocolMessageType( - "Action", - (_message.Message,), - { - "SaveFindings": _reflection.GeneratedProtocolMessageType( - "SaveFindings", - (_message.Message,), - { - "DESCRIPTOR": _ACTION_SAVEFINDINGS, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """If set, the detailed findings will be persisted to the specified - OutputStorageConfig. Only a single instance of this action can be - specified. Compatible with: Inspect, Risk - Attributes: - output_config: - Location to store findings outside of DLP. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.Action.SaveFindings) - }, - ), - "PublishToPubSub": _reflection.GeneratedProtocolMessageType( - "PublishToPubSub", - (_message.Message,), - { - "DESCRIPTOR": _ACTION_PUBLISHTOPUBSUB, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Publish a message into given Pub/Sub topic when DlpJob has completed. - The message contains a single field, ``DlpJobName``, which is equal to - the finished job’s ```DlpJob.name`` - `__. Compatible - with: Inspect, Risk - Attributes: - topic: - Cloud Pub/Sub topic to send notifications to. The topic must - have given publishing access rights to the DLP API service - account executing the long running DlpJob sending the - notifications. Format is projects/{project}/topics/{topic}. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.Action.PublishToPubSub) - }, - ), - "PublishSummaryToCscc": _reflection.GeneratedProtocolMessageType( - "PublishSummaryToCscc", - (_message.Message,), - { - "DESCRIPTOR": _ACTION_PUBLISHSUMMARYTOCSCC, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Publish the result summary of a DlpJob to the Cloud Security Command - Center (CSCC Alpha). This action is only available for projects which - are parts of an organization and whitelisted for the alpha Cloud - Security Command Center. The action will publish count of finding - instances and their info types. The summary of findings will be - persisted in CSCC and are governed by CSCC service-specific policy, - see https://cloud.google.com/terms/service-terms Only a single - instance of this action can be specified. 
Compatible with: Inspect""", - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.Action.PublishSummaryToCscc) - }, - ), - "PublishFindingsToCloudDataCatalog": _reflection.GeneratedProtocolMessageType( - "PublishFindingsToCloudDataCatalog", - (_message.Message,), - { - "DESCRIPTOR": _ACTION_PUBLISHFINDINGSTOCLOUDDATACATALOG, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Publish findings of a DlpJob to Cloud Data Catalog. Labels summarizing - the results of the DlpJob will be applied to the entry for the - resource scanned in Cloud Data Catalog. Any labels previously written - by another DlpJob will be deleted. InfoType naming patterns are - strictly enforced when using this feature. Note that the findings will - be persisted in Cloud Data Catalog storage and are governed by Data - Catalog service-specific policy, see - https://cloud.google.com/terms/service-terms Only a single instance of - this action can be specified and only allowed if all resources being - scanned are BigQuery tables. Compatible with: Inspect""", - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.Action.PublishFindingsToCloudDataCatalog) - }, - ), - "JobNotificationEmails": _reflection.GeneratedProtocolMessageType( - "JobNotificationEmails", - (_message.Message,), - { - "DESCRIPTOR": _ACTION_JOBNOTIFICATIONEMAILS, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Enable email notification to project owners and editors on jobs’s - completion/failure.""", - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.Action.JobNotificationEmails) - }, - ), - "PublishToStackdriver": _reflection.GeneratedProtocolMessageType( - "PublishToStackdriver", - (_message.Message,), - { - "DESCRIPTOR": _ACTION_PUBLISHTOSTACKDRIVER, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Enable Stackdriver metric dlp.googleapis.com/finding_count. 
This will - publish a metric to stack driver on each infotype requested and how - many findings were found for it. CustomDetectors will be bucketed as - ‘Custom’ under the Stackdriver label ‘info_type’.""", - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.Action.PublishToStackdriver) - }, - ), - "DESCRIPTOR": _ACTION, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """A task to execute on the completion of a job. See - https://cloud.google.com/dlp/docs/concepts-actions to learn more. - Attributes: - save_findings: - Save resulting findings in a provided location. - pub_sub: - Publish a notification to a pubsub topic. - publish_summary_to_cscc: - Publish summary to Cloud Security Command Center (Alpha). - publish_findings_to_cloud_data_catalog: - Publish findings to Cloud Datahub. - job_notification_emails: - Enable email notification for project owners and editors on - job’s completion/failure. - publish_to_stackdriver: - Enable Stackdriver metric dlp.googleapis.com/finding_count. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.Action) - }, -) -_sym_db.RegisterMessage(Action) -_sym_db.RegisterMessage(Action.SaveFindings) -_sym_db.RegisterMessage(Action.PublishToPubSub) -_sym_db.RegisterMessage(Action.PublishSummaryToCscc) -_sym_db.RegisterMessage(Action.PublishFindingsToCloudDataCatalog) -_sym_db.RegisterMessage(Action.JobNotificationEmails) -_sym_db.RegisterMessage(Action.PublishToStackdriver) - -CreateInspectTemplateRequest = _reflection.GeneratedProtocolMessageType( - "CreateInspectTemplateRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATEINSPECTTEMPLATEREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for CreateInspectTemplate. - Attributes: - parent: - Required. The parent resource name, for example projects/my- - project-id or organizations/my-org-id or projects/my-project- - id/locations/{location-id}. - inspect_template: - Required. 
The InspectTemplate to create. - template_id: - The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate one. - location_id: - Deprecated. This field has no effect. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CreateInspectTemplateRequest) - }, -) -_sym_db.RegisterMessage(CreateInspectTemplateRequest) - -UpdateInspectTemplateRequest = _reflection.GeneratedProtocolMessageType( - "UpdateInspectTemplateRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATEINSPECTTEMPLATEREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for UpdateInspectTemplate. - Attributes: - name: - Required. Resource name of organization and inspectTemplate to - be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - inspect_template: - New InspectTemplate value. - update_mask: - Mask to control which fields get updated. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.UpdateInspectTemplateRequest) - }, -) -_sym_db.RegisterMessage(UpdateInspectTemplateRequest) - -GetInspectTemplateRequest = _reflection.GeneratedProtocolMessageType( - "GetInspectTemplateRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETINSPECTTEMPLATEREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for GetInspectTemplate. - Attributes: - name: - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.GetInspectTemplateRequest) - }, -) -_sym_db.RegisterMessage(GetInspectTemplateRequest) - -ListInspectTemplatesRequest = _reflection.GeneratedProtocolMessageType( - "ListInspectTemplatesRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTINSPECTTEMPLATESREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for ListInspectTemplates. - Attributes: - parent: - Required. The parent resource name, for example projects/my- - project-id or organizations/my-org-id or projects/my-project- - id/locations/{location_id}. - page_token: - Page token to continue retrieval. Comes from previous call to - ``ListInspectTemplates``. - page_size: - Size of the page, can be limited by server. If zero server - returns a page of max size 100. - order_by: - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space characters - are insignificant. Example: ``name asc,update_time, - create_time desc`` Supported fields are: - ``create_time``: - corresponds to time the template was created. - - ``update_time``: corresponds to time the template was last - updated. - ``name``: corresponds to template’s name. - - ``display_name``: corresponds to template’s display name. - location_id: - Deprecated. This field has no effect. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListInspectTemplatesRequest) - }, -) -_sym_db.RegisterMessage(ListInspectTemplatesRequest) - -ListInspectTemplatesResponse = _reflection.GeneratedProtocolMessageType( - "ListInspectTemplatesResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTINSPECTTEMPLATESRESPONSE, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Response message for ListInspectTemplates. 
- Attributes: - inspect_templates: - List of inspectTemplates, up to page_size in - ListInspectTemplatesRequest. - next_page_token: - If the next page is available then the next page token to be - used in following ListInspectTemplates request. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListInspectTemplatesResponse) - }, -) -_sym_db.RegisterMessage(ListInspectTemplatesResponse) - -DeleteInspectTemplateRequest = _reflection.GeneratedProtocolMessageType( - "DeleteInspectTemplateRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEINSPECTTEMPLATEREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for DeleteInspectTemplate. - Attributes: - name: - Required. Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DeleteInspectTemplateRequest) - }, -) -_sym_db.RegisterMessage(DeleteInspectTemplateRequest) - -CreateJobTriggerRequest = _reflection.GeneratedProtocolMessageType( - "CreateJobTriggerRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATEJOBTRIGGERREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for CreateJobTrigger. - Attributes: - parent: - Required. The parent resource name, for example projects/my- - project-id or projects/my-project-id/locations/{location_id}. - job_trigger: - Required. The JobTrigger to create. - trigger_id: - The trigger id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate one. - location_id: - Deprecated. This field has no effect. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CreateJobTriggerRequest) - }, -) -_sym_db.RegisterMessage(CreateJobTriggerRequest) - -ActivateJobTriggerRequest = _reflection.GeneratedProtocolMessageType( - "ActivateJobTriggerRequest", - (_message.Message,), - { - "DESCRIPTOR": _ACTIVATEJOBTRIGGERREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for ActivateJobTrigger. - Attributes: - name: - Required. Resource name of the trigger to activate, for - example ``projects/dlp-test-project/jobTriggers/53234423``. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ActivateJobTriggerRequest) - }, -) -_sym_db.RegisterMessage(ActivateJobTriggerRequest) - -UpdateJobTriggerRequest = _reflection.GeneratedProtocolMessageType( - "UpdateJobTriggerRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATEJOBTRIGGERREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for UpdateJobTrigger. - Attributes: - name: - Required. Resource name of the project and the triggeredJob, - for example ``projects/dlp-test- - project/jobTriggers/53234423``. - job_trigger: - New JobTrigger value. - update_mask: - Mask to control which fields get updated. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.UpdateJobTriggerRequest) - }, -) -_sym_db.RegisterMessage(UpdateJobTriggerRequest) - -GetJobTriggerRequest = _reflection.GeneratedProtocolMessageType( - "GetJobTriggerRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETJOBTRIGGERREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for GetJobTrigger. - Attributes: - name: - Required. Resource name of the project and the triggeredJob, - for example ``projects/dlp-test- - project/jobTriggers/53234423``. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.GetJobTriggerRequest) - }, -) -_sym_db.RegisterMessage(GetJobTriggerRequest) - -CreateDlpJobRequest = _reflection.GeneratedProtocolMessageType( - "CreateDlpJobRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATEDLPJOBREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for CreateDlpJobRequest. Used to initiate long running - jobs such as calculating risk metrics or inspecting Google Cloud - Storage. - Attributes: - parent: - Required. The parent resource name, for example projects/my- - project-id or projects/my-project-id/locations/{location_id}. - job: - The configuration details for the specific type of job to run. - inspect_job: - Set to control what and how to inspect. - risk_job: - Set to choose what metric to calculate. - job_id: - The job id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate one. - location_id: - Deprecated. This field has no effect. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CreateDlpJobRequest) - }, -) -_sym_db.RegisterMessage(CreateDlpJobRequest) - -ListJobTriggersRequest = _reflection.GeneratedProtocolMessageType( - "ListJobTriggersRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTJOBTRIGGERSREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for ListJobTriggers. - Attributes: - parent: - Required. The parent resource name, for example ``projects/my- - project-id`` or projects/my-project- - id/locations/{location_id}. - page_token: - Page token to continue retrieval. Comes from previous call to - ListJobTriggers. ``order_by`` field must not change for - subsequent calls. - page_size: - Size of the page, can be limited by a server. 
- order_by: - Comma separated list of triggeredJob fields to order by, - followed by ``asc`` or ``desc`` postfix. This list is case- - insensitive, default sorting order is ascending, redundant - space characters are insignificant. Example: ``name - asc,update_time, create_time desc`` Supported fields are: - - ``create_time``: corresponds to time the JobTrigger was - created. - ``update_time``: corresponds to time the - JobTrigger was last updated. - ``last_run_time``: corresponds - to the last time the JobTrigger ran. - ``name``: corresponds - to JobTrigger’s name. - ``display_name``: corresponds to - JobTrigger’s display name. - ``status``: corresponds to - JobTrigger’s status. - filter: - Allows filtering. Supported syntax: - Filter expressions - are made up of one or more restrictions. - Restrictions can - be combined by ``AND`` or ``OR`` logical operators. A - sequence of restrictions implicitly uses ``AND``. - A - restriction has the form of ``{field} {operator} {value}``. - - Supported fields/values for inspect jobs: - ``status`` - - HEALTHY|PAUSED|CANCELLED - ``inspected_storage`` - - DATASTORE|CLOUD_STORAGE|BIGQUERY - ’last_run_time\` - RFC - 3339 formatted timestamp, surrounded by quotation marks. - Nanoseconds are ignored. - ‘error_count’ - Number of - errors that have occurred while running. - The operator must - be ``=`` or ``!=`` for status and inspected_storage. - Examples: - inspected_storage = cloud_storage AND status = - HEALTHY - inspected_storage = cloud_storage OR - inspected_storage = bigquery - inspected_storage = - cloud_storage AND (state = PAUSED OR state = HEALTHY) - - last_run_time > "2017-12-12T00:00:00+00:00" The length of - this field should be no more than 500 characters. - location_id: - Deprecated. This field has no effect. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListJobTriggersRequest) - }, -) -_sym_db.RegisterMessage(ListJobTriggersRequest) - -ListJobTriggersResponse = _reflection.GeneratedProtocolMessageType( - "ListJobTriggersResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTJOBTRIGGERSRESPONSE, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Response message for ListJobTriggers. - Attributes: - job_triggers: - List of triggeredJobs, up to page_size in - ListJobTriggersRequest. - next_page_token: - If the next page is available then the next page token to be - used in following ListJobTriggers request. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListJobTriggersResponse) - }, -) -_sym_db.RegisterMessage(ListJobTriggersResponse) - -DeleteJobTriggerRequest = _reflection.GeneratedProtocolMessageType( - "DeleteJobTriggerRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEJOBTRIGGERREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for DeleteJobTrigger. - Attributes: - name: - Required. Resource name of the project and the triggeredJob, - for example ``projects/dlp-test- - project/jobTriggers/53234423``. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DeleteJobTriggerRequest) - }, -) -_sym_db.RegisterMessage(DeleteJobTriggerRequest) - -InspectJobConfig = _reflection.GeneratedProtocolMessageType( - "InspectJobConfig", - (_message.Message,), - { - "DESCRIPTOR": _INSPECTJOBCONFIG, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Controls what and how to inspect for findings. - Attributes: - storage_config: \ - The data to scan. - inspect_config: \ - How and what to scan for. - inspect_template_name: \ - If provided, will be used as the default for all values in \ - InspectConfig. ``inspect_config`` will be merged into the \ - values persisted as part of the template. 
\ - actions: \ - Actions to execute at the completion of the job. \ - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.InspectJobConfig) - }, -) -_sym_db.RegisterMessage(InspectJobConfig) - -DlpJob = _reflection.GeneratedProtocolMessageType( - "DlpJob", - (_message.Message,), - { - "DESCRIPTOR": _DLPJOB, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Combines all of the information about a DLP job. - Attributes: - name: - The server-assigned name. - type: - The type of job. - state: - State of a job. - risk_details: - Results from analyzing risk of a data source. - inspect_details: - Results from inspecting a data source. - create_time: - Time when the job was created. - start_time: - Time when the job started. - end_time: - Time when the job finished. - job_trigger_name: - If created by a job trigger, the resource name of the trigger - that instantiated the job. - errors: - A stream of errors encountered running the job. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DlpJob) - }, -) -_sym_db.RegisterMessage(DlpJob) - -GetDlpJobRequest = _reflection.GeneratedProtocolMessageType( - "GetDlpJobRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETDLPJOBREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """The request message for [DlpJobs.GetDlpJob][]. - Attributes: - name: - Required. The name of the DlpJob resource. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.GetDlpJobRequest) - }, -) -_sym_db.RegisterMessage(GetDlpJobRequest) - -ListDlpJobsRequest = _reflection.GeneratedProtocolMessageType( - "ListDlpJobsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTDLPJOBSREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """The request message for listing DLP jobs. - Attributes: - parent: - Required. The parent resource name, for example projects/my- - project-id or projects/my-project-id/locations/{location_id}. 
- filter: - Allows filtering. Supported syntax: - Filter expressions - are made up of one or more restrictions. - Restrictions can - be combined by ``AND`` or ``OR`` logical operators. A - sequence of restrictions implicitly uses ``AND``. - A - restriction has the form of ``{field} {operator} {value}``. - - Supported fields/values for inspect jobs: - ``state`` - - PENDING|RUNNING|CANCELED|FINISHED|FAILED - - ``inspected_storage`` - DATASTORE|CLOUD_STORAGE|BIGQUERY - - ``trigger_name`` - The resource name of the trigger that - created job. - ’end_time\` - Corresponds to time the - job finished. - ’start_time\` - Corresponds to time the - job finished. - Supported fields for risk analysis jobs: - - ``state`` - RUNNING|CANCELED|FINISHED|FAILED - - ’end_time\` - Corresponds to time the job finished. - - ’start_time\` - Corresponds to time the job finished. - The - operator must be ``=`` or ``!=``. Examples: - - inspected_storage = cloud_storage AND state = done - - inspected_storage = cloud_storage OR inspected_storage = - bigquery - inspected_storage = cloud_storage AND (state = - done OR state = canceled) - end_time > - "2017-12-12T00:00:00+00:00" The length of this field should - be no more than 500 characters. - page_size: - The standard list page size. - page_token: - The standard list page token. - type: - The type of job. Defaults to ``DlpJobType.INSPECT`` - order_by: - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space characters - are insignificant. Example: ``name asc, end_time asc, - create_time desc`` Supported fields are: - ``create_time``: - corresponds to time the job was created. - ``end_time``: - corresponds to time the job ended. - ``name``: corresponds to - job’s name. - ``state``: corresponds to ``state`` - location_id: - Deprecated. This field has no effect. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListDlpJobsRequest) - }, -) -_sym_db.RegisterMessage(ListDlpJobsRequest) - -ListDlpJobsResponse = _reflection.GeneratedProtocolMessageType( - "ListDlpJobsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTDLPJOBSRESPONSE, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """The response message for listing DLP jobs. - Attributes: - jobs: - A list of DlpJobs that matches the specified filter in the - request. - next_page_token: - The standard List next-page token. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListDlpJobsResponse) - }, -) -_sym_db.RegisterMessage(ListDlpJobsResponse) - -CancelDlpJobRequest = _reflection.GeneratedProtocolMessageType( - "CancelDlpJobRequest", - (_message.Message,), - { - "DESCRIPTOR": _CANCELDLPJOBREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """The request message for canceling a DLP job. - Attributes: - name: - Required. The name of the DlpJob resource to be cancelled. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CancelDlpJobRequest) - }, -) -_sym_db.RegisterMessage(CancelDlpJobRequest) - -FinishDlpJobRequest = _reflection.GeneratedProtocolMessageType( - "FinishDlpJobRequest", - (_message.Message,), - { - "DESCRIPTOR": _FINISHDLPJOBREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """The request message for finishing a DLP hybrid job. - Attributes: - name: - Required. The name of the DlpJob resource to be cancelled. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.FinishDlpJobRequest) - }, -) -_sym_db.RegisterMessage(FinishDlpJobRequest) - -DeleteDlpJobRequest = _reflection.GeneratedProtocolMessageType( - "DeleteDlpJobRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEDLPJOBREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """The request message for deleting a DLP job. 
- Attributes: - name: - Required. The name of the DlpJob resource to be deleted. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DeleteDlpJobRequest) - }, -) -_sym_db.RegisterMessage(DeleteDlpJobRequest) - -CreateDeidentifyTemplateRequest = _reflection.GeneratedProtocolMessageType( - "CreateDeidentifyTemplateRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATEDEIDENTIFYTEMPLATEREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for CreateDeidentifyTemplate. - Attributes: - parent: - Required. The parent resource name, for example projects/my- - project-id or organizations/my-org-id or projects/my-project- - id/locations/{location_id}. - deidentify_template: - Required. The DeidentifyTemplate to create. - template_id: - The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate one. - location_id: - Deprecated. This field has no effect. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CreateDeidentifyTemplateRequest) - }, -) -_sym_db.RegisterMessage(CreateDeidentifyTemplateRequest) - -UpdateDeidentifyTemplateRequest = _reflection.GeneratedProtocolMessageType( - "UpdateDeidentifyTemplateRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATEDEIDENTIFYTEMPLATEREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for UpdateDeidentifyTemplate. - Attributes: - name: - Required. Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - deidentify_template: - New DeidentifyTemplate value. - update_mask: - Mask to control which fields get updated. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.UpdateDeidentifyTemplateRequest) - }, -) -_sym_db.RegisterMessage(UpdateDeidentifyTemplateRequest) - -GetDeidentifyTemplateRequest = _reflection.GeneratedProtocolMessageType( - "GetDeidentifyTemplateRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETDEIDENTIFYTEMPLATEREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for GetDeidentifyTemplate. - Attributes: - name: - Required. Resource name of the organization and deidentify - template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.GetDeidentifyTemplateRequest) - }, -) -_sym_db.RegisterMessage(GetDeidentifyTemplateRequest) - -ListDeidentifyTemplatesRequest = _reflection.GeneratedProtocolMessageType( - "ListDeidentifyTemplatesRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTDEIDENTIFYTEMPLATESREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for ListDeidentifyTemplates. - Attributes: - parent: - Required. The parent resource name, for example projects/my- - project-id or organizations/my-org-id or projects/my-project- - id/locations/{location_id}. - page_token: - Page token to continue retrieval. Comes from previous call to - ``ListDeidentifyTemplates``. - page_size: - Size of the page, can be limited by server. If zero server - returns a page of max size 100. - order_by: - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space characters - are insignificant. Example: ``name asc,update_time, - create_time desc`` Supported fields are: - ``create_time``: - corresponds to time the template was created. - - ``update_time``: corresponds to time the template was last - updated. 
- ``name``: corresponds to template’s name. - - ``display_name``: corresponds to template’s display name. - location_id: - Deprecated. This field has no effect. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListDeidentifyTemplatesRequest) - }, -) -_sym_db.RegisterMessage(ListDeidentifyTemplatesRequest) - -ListDeidentifyTemplatesResponse = _reflection.GeneratedProtocolMessageType( - "ListDeidentifyTemplatesResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTDEIDENTIFYTEMPLATESRESPONSE, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Response message for ListDeidentifyTemplates. - Attributes: - deidentify_templates: - List of deidentify templates, up to page_size in - ListDeidentifyTemplatesRequest. - next_page_token: - If the next page is available then the next page token to be - used in following ListDeidentifyTemplates request. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListDeidentifyTemplatesResponse) - }, -) -_sym_db.RegisterMessage(ListDeidentifyTemplatesResponse) - -DeleteDeidentifyTemplateRequest = _reflection.GeneratedProtocolMessageType( - "DeleteDeidentifyTemplateRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEDEIDENTIFYTEMPLATEREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for DeleteDeidentifyTemplate. - Attributes: - name: - Required. Resource name of the organization and deidentify - template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DeleteDeidentifyTemplateRequest) - }, -) -_sym_db.RegisterMessage(DeleteDeidentifyTemplateRequest) - -LargeCustomDictionaryConfig = _reflection.GeneratedProtocolMessageType( - "LargeCustomDictionaryConfig", - (_message.Message,), - { - "DESCRIPTOR": _LARGECUSTOMDICTIONARYCONFIG, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Configuration for a custom dictionary created from a data source of - any size up to the maximum size defined in the `limits - `__ page. The artifacts of - dictionary creation are stored in the specified Google Cloud Storage - location. Consider using ``CustomInfoType.Dictionary`` for smaller - dictionaries that satisfy the size requirements. - Attributes: - output_path: - Location to store dictionary artifacts in Google Cloud - Storage. These files will only be accessible by project owners - and the DLP API. If any of these artifacts are modified, the - dictionary is considered invalid and can no longer be used. - cloud_storage_file_set: - Set of files containing newline-delimited lists of dictionary - phrases. - big_query_field: - Field in a BigQuery table where each cell represents a - dictionary phrase. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.LargeCustomDictionaryConfig) - }, -) -_sym_db.RegisterMessage(LargeCustomDictionaryConfig) - -LargeCustomDictionaryStats = _reflection.GeneratedProtocolMessageType( - "LargeCustomDictionaryStats", - (_message.Message,), - { - "DESCRIPTOR": _LARGECUSTOMDICTIONARYSTATS, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Summary statistics of a custom dictionary. - Attributes: - approx_num_phrases: - Approximate number of distinct phrases in the dictionary. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.LargeCustomDictionaryStats) - }, -) -_sym_db.RegisterMessage(LargeCustomDictionaryStats) - -StoredInfoTypeConfig = _reflection.GeneratedProtocolMessageType( - "StoredInfoTypeConfig", - (_message.Message,), - { - "DESCRIPTOR": _STOREDINFOTYPECONFIG, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Configuration for stored infoTypes. All fields and subfield are - provided by the user. For more information, see - https://cloud.google.com/dlp/docs/creating-custom-infotypes. - Attributes: - display_name: - Display name of the StoredInfoType (max 256 characters). - description: - Description of the StoredInfoType (max 256 characters). - type: - Stored infotype types. - large_custom_dictionary: - StoredInfoType where findings are defined by a dictionary of - phrases. - dictionary: - Store dictionary-based CustomInfoType. - regex: - Store regular expression-based StoredInfoType. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.StoredInfoTypeConfig) - }, -) -_sym_db.RegisterMessage(StoredInfoTypeConfig) - -StoredInfoTypeStats = _reflection.GeneratedProtocolMessageType( - "StoredInfoTypeStats", - (_message.Message,), - { - "DESCRIPTOR": _STOREDINFOTYPESTATS, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Statistics for a StoredInfoType. - Attributes: - type: - Stat types - large_custom_dictionary: - StoredInfoType where findings are defined by a dictionary of - phrases. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.StoredInfoTypeStats) - }, -) -_sym_db.RegisterMessage(StoredInfoTypeStats) - -StoredInfoTypeVersion = _reflection.GeneratedProtocolMessageType( - "StoredInfoTypeVersion", - (_message.Message,), - { - "DESCRIPTOR": _STOREDINFOTYPEVERSION, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Version of a StoredInfoType, including the configuration used to build - it, create timestamp, and current state. - Attributes: - config: - StoredInfoType configuration. - create_time: - Create timestamp of the version. Read-only, determined by the - system when the version is created. - state: - Stored info type version state. Read-only, updated by the - system during dictionary creation. - errors: - Errors that occurred when creating this storedInfoType - version, or anomalies detected in the storedInfoType data that - render it unusable. Only the five most recent errors will be - displayed, with the most recent error appearing first. For - example, some of the data for stored custom dictionaries is - put in the user’s Google Cloud Storage bucket, and if this - data is modified or deleted by the user or another system, the - dictionary becomes invalid. If any errors occur, fix the - problem indicated by the error message and use the - UpdateStoredInfoType API method to create another version of - the storedInfoType to continue using it, reusing the same - ``config`` if it was not the source of the error. - stats: - Statistics about this storedInfoType version. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.StoredInfoTypeVersion) - }, -) -_sym_db.RegisterMessage(StoredInfoTypeVersion) - -StoredInfoType = _reflection.GeneratedProtocolMessageType( - "StoredInfoType", - (_message.Message,), - { - "DESCRIPTOR": _STOREDINFOTYPE, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """StoredInfoType resource message that contains information about the - current version and any pending updates. - Attributes: - name: - Resource name. - current_version: - Current version of the stored info type. - pending_versions: - Pending versions of the stored info type. Empty if no versions - are pending. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.StoredInfoType) - }, -) -_sym_db.RegisterMessage(StoredInfoType) - -CreateStoredInfoTypeRequest = _reflection.GeneratedProtocolMessageType( - "CreateStoredInfoTypeRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATESTOREDINFOTYPEREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for CreateStoredInfoType. - Attributes: - parent: - Required. The parent resource name, for example projects/my- - project-id or organizations/my-org-id or projects/my-project- - id/locations/{location_id} - config: - Required. Configuration of the storedInfoType to create. - stored_info_type_id: - The storedInfoType ID can contain uppercase and lowercase - letters, numbers, and hyphens; that is, it must match the - regular expression: ``[a-zA-Z\\d-_]+``. The maximum length is - 100 characters. Can be empty to allow the system to generate - one. - location_id: - Deprecated. This field has no effect. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CreateStoredInfoTypeRequest) - }, -) -_sym_db.RegisterMessage(CreateStoredInfoTypeRequest) - -UpdateStoredInfoTypeRequest = _reflection.GeneratedProtocolMessageType( - "UpdateStoredInfoTypeRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATESTOREDINFOTYPEREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for UpdateStoredInfoType. - Attributes: - name: - Required. Resource name of organization and storedInfoType to - be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - config: - Updated configuration for the storedInfoType. If not provided, - a new version of the storedInfoType will be created with the - existing configuration. - update_mask: - Mask to control which fields get updated. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.UpdateStoredInfoTypeRequest) - }, -) -_sym_db.RegisterMessage(UpdateStoredInfoTypeRequest) - -GetStoredInfoTypeRequest = _reflection.GeneratedProtocolMessageType( - "GetStoredInfoTypeRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETSTOREDINFOTYPEREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for GetStoredInfoType. - Attributes: - name: - Required. Resource name of the organization and storedInfoType - to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.GetStoredInfoTypeRequest) - }, -) -_sym_db.RegisterMessage(GetStoredInfoTypeRequest) - -ListStoredInfoTypesRequest = _reflection.GeneratedProtocolMessageType( - "ListStoredInfoTypesRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTSTOREDINFOTYPESREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for ListStoredInfoTypes. - Attributes: - parent: - Required. The parent resource name, for example projects/my- - project-id or organizations/my-org-id or projects/my-project- - id/locations/{location_id}. - page_token: - Page token to continue retrieval. Comes from previous call to - ``ListStoredInfoTypes``. - page_size: - Size of the page, can be limited by server. If zero server - returns a page of max size 100. - order_by: - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space characters - are insignificant. Example: ``name asc, display_name, - create_time desc`` Supported fields are: - ``create_time``: - corresponds to time the most recent version of the resource - was created. - ``state``: corresponds to the state of the - resource. - ``name``: corresponds to resource name. - - ``display_name``: corresponds to info type’s display name. - location_id: - Deprecated. This field has no effect. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListStoredInfoTypesRequest) - }, -) -_sym_db.RegisterMessage(ListStoredInfoTypesRequest) - -ListStoredInfoTypesResponse = _reflection.GeneratedProtocolMessageType( - "ListStoredInfoTypesResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTSTOREDINFOTYPESRESPONSE, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Response message for ListStoredInfoTypes. 
- Attributes: - stored_info_types: - List of storedInfoTypes, up to page_size in - ListStoredInfoTypesRequest. - next_page_token: - If the next page is available then the next page token to be - used in following ListStoredInfoTypes request. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ListStoredInfoTypesResponse) - }, -) -_sym_db.RegisterMessage(ListStoredInfoTypesResponse) - -DeleteStoredInfoTypeRequest = _reflection.GeneratedProtocolMessageType( - "DeleteStoredInfoTypeRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETESTOREDINFOTYPEREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request message for DeleteStoredInfoType. - Attributes: - name: - Required. Resource name of the organization and storedInfoType - to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DeleteStoredInfoTypeRequest) - }, -) -_sym_db.RegisterMessage(DeleteStoredInfoTypeRequest) - -HybridInspectJobTriggerRequest = _reflection.GeneratedProtocolMessageType( - "HybridInspectJobTriggerRequest", - (_message.Message,), - { - "DESCRIPTOR": _HYBRIDINSPECTJOBTRIGGERREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request to search for potentially sensitive info in a custom location. - Attributes: - name: - Required. Resource name of the trigger to execute a hybrid - inspect on, for example ``projects/dlp-test- - project/jobTriggers/53234423``. - hybrid_item: - The item to inspect. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.HybridInspectJobTriggerRequest) - }, -) -_sym_db.RegisterMessage(HybridInspectJobTriggerRequest) - -HybridInspectDlpJobRequest = _reflection.GeneratedProtocolMessageType( - "HybridInspectDlpJobRequest", - (_message.Message,), - { - "DESCRIPTOR": _HYBRIDINSPECTDLPJOBREQUEST, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Request to search for potentially sensitive info in a custom location. - Attributes: - name: - Required. Resource name of the job to execute a hybrid inspect - on, for example ``projects/dlp-test-project/dlpJob/53234423``. - hybrid_item: - The item to inspect. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.HybridInspectDlpJobRequest) - }, -) -_sym_db.RegisterMessage(HybridInspectDlpJobRequest) - -HybridContentItem = _reflection.GeneratedProtocolMessageType( - "HybridContentItem", - (_message.Message,), - { - "DESCRIPTOR": _HYBRIDCONTENTITEM, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """An individual hybrid item to inspect. Will be stored temporarily - during processing. - Attributes: - item: - The item to inspect. - finding_details: - Supplementary information that will be added to each finding. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.HybridContentItem) - }, -) -_sym_db.RegisterMessage(HybridContentItem) - -HybridFindingDetails = _reflection.GeneratedProtocolMessageType( - "HybridFindingDetails", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _HYBRIDFINDINGDETAILS_LABELSENTRY, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2" - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.HybridFindingDetails.LabelsEntry) - }, - ), - "DESCRIPTOR": _HYBRIDFINDINGDETAILS, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Populate to associate additional data with each finding. - Attributes: - container_details: - Details about the container where the content being inspected - is from. - file_offset: - Offset in bytes of the line, from the beginning of the file, - where the finding is located. Populate if the item being - scanned is only part of a bigger item, such as a shard of a - file and you want to track the absolute position of the - finding. - row_offset: - Offset of the row for tables. Populate if the row(s) being - scanned are part of a bigger dataset and you want to keep - track of their absolute position. - table_options: - If the container is a table, additional information to make - findings meaningful such as the columns that are primary keys. - If not known ahead of time, can also be set within each - inspect hybrid call and the two will be merged. Note that - identifying_fields will only be stored to BigQuery, and only - if the BigQuery action has been included. - labels: - Labels to represent user provided metadata about the data - being inspected. If configured by the job, some key values may - be required. The labels associated with ``Finding``\ ’s - produced by hybrid inspection. 
Label keys must be between 1 - and 63 characters long and must conform to the following - regular expression: ``[a-z]([-a-z0-9]*[a-z0-9])?``. Label - values must be between 0 and 63 characters long and must - conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. No more than 10 labels can - be associated with a given finding. Examples: \* - ``"environment" : "production"`` \* ``"pipeline" : "etl"`` - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.HybridFindingDetails) - }, -) -_sym_db.RegisterMessage(HybridFindingDetails) -_sym_db.RegisterMessage(HybridFindingDetails.LabelsEntry) - -HybridInspectResponse = _reflection.GeneratedProtocolMessageType( - "HybridInspectResponse", - (_message.Message,), - { - "DESCRIPTOR": _HYBRIDINSPECTRESPONSE, - "__module__": "google.cloud.dlp_v2.proto.dlp_pb2", - "__doc__": """Quota exceeded errors will be thrown once quota has been met.""", - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.HybridInspectResponse) - }, -) -_sym_db.RegisterMessage(HybridInspectResponse) - - -DESCRIPTOR._options = None -_FINDING_LABELSENTRY._options = None -_FINDING.fields_by_name["resource_name"]._options = None -_FINDING.fields_by_name["trigger_name"]._options = None -_FINDING.fields_by_name["job_name"]._options = None -_FINDING._options = None -_REDACTIMAGEREQUEST.fields_by_name["parent"]._options = None -_DEIDENTIFYCONTENTREQUEST.fields_by_name["parent"]._options = None -_REIDENTIFYCONTENTREQUEST.fields_by_name["parent"]._options = None -_INSPECTCONTENTREQUEST.fields_by_name["parent"]._options = None -_QUASIID.fields_by_name["field"]._options = None -_STATISTICALTABLE.fields_by_name["table"]._options = None -_STATISTICALTABLE.fields_by_name["quasi_ids"]._options = None -_STATISTICALTABLE.fields_by_name["relative_frequency"]._options = None -_PRIVACYMETRIC_KMAPESTIMATIONCONFIG_TAGGEDFIELD.fields_by_name["field"]._options = None -_PRIVACYMETRIC_KMAPESTIMATIONCONFIG_AUXILIARYTABLE.fields_by_name[ - 
"table" -]._options = None -_PRIVACYMETRIC_KMAPESTIMATIONCONFIG_AUXILIARYTABLE.fields_by_name[ - "quasi_ids" -]._options = None -_PRIVACYMETRIC_KMAPESTIMATIONCONFIG_AUXILIARYTABLE.fields_by_name[ - "relative_frequency" -]._options = None -_PRIVACYMETRIC_KMAPESTIMATIONCONFIG.fields_by_name["quasi_ids"]._options = None -_PRIVACYMETRIC_DELTAPRESENCEESTIMATIONCONFIG.fields_by_name["quasi_ids"]._options = None -_FIXEDSIZEBUCKETINGCONFIG.fields_by_name["lower_bound"]._options = None -_FIXEDSIZEBUCKETINGCONFIG.fields_by_name["upper_bound"]._options = None -_FIXEDSIZEBUCKETINGCONFIG.fields_by_name["bucket_size"]._options = None -_CRYPTOREPLACEFFXFPECONFIG.fields_by_name["crypto_key"]._options = None -_TRANSIENTCRYPTOKEY.fields_by_name["name"]._options = None -_UNWRAPPEDCRYPTOKEY.fields_by_name["key"]._options = None -_KMSWRAPPEDCRYPTOKEY.fields_by_name["wrapped_key"]._options = None -_KMSWRAPPEDCRYPTOKEY.fields_by_name["crypto_key_name"]._options = None -_DATESHIFTCONFIG.fields_by_name["upper_bound_days"]._options = None -_DATESHIFTCONFIG.fields_by_name["lower_bound_days"]._options = None -_INFOTYPETRANSFORMATIONS_INFOTYPETRANSFORMATION.fields_by_name[ - "primitive_transformation" -]._options = None -_INFOTYPETRANSFORMATIONS.fields_by_name["transformations"]._options = None -_FIELDTRANSFORMATION.fields_by_name["fields"]._options = None -_RECORDCONDITION_CONDITION.fields_by_name["field"]._options = None -_RECORDCONDITION_CONDITION.fields_by_name["operator"]._options = None -_INSPECTTEMPLATE.fields_by_name["name"]._options = None -_INSPECTTEMPLATE.fields_by_name["create_time"]._options = None -_INSPECTTEMPLATE.fields_by_name["update_time"]._options = None -_INSPECTTEMPLATE._options = None -_DEIDENTIFYTEMPLATE.fields_by_name["name"]._options = None -_DEIDENTIFYTEMPLATE.fields_by_name["create_time"]._options = None -_DEIDENTIFYTEMPLATE.fields_by_name["update_time"]._options = None -_DEIDENTIFYTEMPLATE._options = None -_JOBTRIGGER.fields_by_name["errors"]._options = None 
-_JOBTRIGGER.fields_by_name["create_time"]._options = None -_JOBTRIGGER.fields_by_name["update_time"]._options = None -_JOBTRIGGER.fields_by_name["last_run_time"]._options = None -_JOBTRIGGER.fields_by_name["status"]._options = None -_JOBTRIGGER._options = None -_CREATEINSPECTTEMPLATEREQUEST.fields_by_name["parent"]._options = None -_CREATEINSPECTTEMPLATEREQUEST.fields_by_name["inspect_template"]._options = None -_UPDATEINSPECTTEMPLATEREQUEST.fields_by_name["name"]._options = None -_GETINSPECTTEMPLATEREQUEST.fields_by_name["name"]._options = None -_LISTINSPECTTEMPLATESREQUEST.fields_by_name["parent"]._options = None -_DELETEINSPECTTEMPLATEREQUEST.fields_by_name["name"]._options = None -_CREATEJOBTRIGGERREQUEST.fields_by_name["parent"]._options = None -_CREATEJOBTRIGGERREQUEST.fields_by_name["job_trigger"]._options = None -_ACTIVATEJOBTRIGGERREQUEST.fields_by_name["name"]._options = None -_UPDATEJOBTRIGGERREQUEST.fields_by_name["name"]._options = None -_GETJOBTRIGGERREQUEST.fields_by_name["name"]._options = None -_CREATEDLPJOBREQUEST.fields_by_name["parent"]._options = None -_LISTJOBTRIGGERSREQUEST.fields_by_name["parent"]._options = None -_DELETEJOBTRIGGERREQUEST.fields_by_name["name"]._options = None -_DLPJOB._options = None -_GETDLPJOBREQUEST.fields_by_name["name"]._options = None -_LISTDLPJOBSREQUEST.fields_by_name["parent"]._options = None -_CANCELDLPJOBREQUEST.fields_by_name["name"]._options = None -_FINISHDLPJOBREQUEST.fields_by_name["name"]._options = None -_DELETEDLPJOBREQUEST.fields_by_name["name"]._options = None -_CREATEDEIDENTIFYTEMPLATEREQUEST.fields_by_name["parent"]._options = None -_CREATEDEIDENTIFYTEMPLATEREQUEST.fields_by_name["deidentify_template"]._options = None -_UPDATEDEIDENTIFYTEMPLATEREQUEST.fields_by_name["name"]._options = None -_GETDEIDENTIFYTEMPLATEREQUEST.fields_by_name["name"]._options = None -_LISTDEIDENTIFYTEMPLATESREQUEST.fields_by_name["parent"]._options = None -_DELETEDEIDENTIFYTEMPLATEREQUEST.fields_by_name["name"]._options = 
None -_STOREDINFOTYPE._options = None -_CREATESTOREDINFOTYPEREQUEST.fields_by_name["parent"]._options = None -_CREATESTOREDINFOTYPEREQUEST.fields_by_name["config"]._options = None -_UPDATESTOREDINFOTYPEREQUEST.fields_by_name["name"]._options = None -_GETSTOREDINFOTYPEREQUEST.fields_by_name["name"]._options = None -_LISTSTOREDINFOTYPESREQUEST.fields_by_name["parent"]._options = None -_DELETESTOREDINFOTYPEREQUEST.fields_by_name["name"]._options = None -_HYBRIDINSPECTJOBTRIGGERREQUEST.fields_by_name["name"]._options = None -_HYBRIDINSPECTDLPJOBREQUEST.fields_by_name["name"]._options = None -_HYBRIDFINDINGDETAILS_LABELSENTRY._options = None - -_DLPSERVICE = _descriptor.ServiceDescriptor( - name="DlpService", - full_name="google.privacy.dlp.v2.DlpService", - file=DESCRIPTOR, - index=0, - serialized_options=b"\312A\022dlp.googleapis.com\322A.https://www.googleapis.com/auth/cloud-platform", - serialized_start=33110, - serialized_end=42521, - methods=[ - _descriptor.MethodDescriptor( - name="InspectContent", - full_name="google.privacy.dlp.v2.DlpService.InspectContent", - index=0, - containing_service=None, - input_type=_INSPECTCONTENTREQUEST, - output_type=_INSPECTCONTENTRESPONSE, - serialized_options=b'\202\323\344\223\002f"\'/v2/{parent=projects/*}/content:inspect:\001*Z8"3/v2/{parent=projects/*/locations/*}/content:inspect:\001*', - ), - _descriptor.MethodDescriptor( - name="RedactImage", - full_name="google.privacy.dlp.v2.DlpService.RedactImage", - index=1, - containing_service=None, - input_type=_REDACTIMAGEREQUEST, - output_type=_REDACTIMAGERESPONSE, - serialized_options=b'\202\323\344\223\002`"$/v2/{parent=projects/*}/image:redact:\001*Z5"0/v2/{parent=projects/*/locations/*}/image:redact:\001*', - ), - _descriptor.MethodDescriptor( - name="DeidentifyContent", - full_name="google.privacy.dlp.v2.DlpService.DeidentifyContent", - index=2, - containing_service=None, - input_type=_DEIDENTIFYCONTENTREQUEST, - output_type=_DEIDENTIFYCONTENTRESPONSE, - 
serialized_options=b'\202\323\344\223\002l"*/v2/{parent=projects/*}/content:deidentify:\001*Z;"6/v2/{parent=projects/*/locations/*}/content:deidentify:\001*', - ), - _descriptor.MethodDescriptor( - name="ReidentifyContent", - full_name="google.privacy.dlp.v2.DlpService.ReidentifyContent", - index=3, - containing_service=None, - input_type=_REIDENTIFYCONTENTREQUEST, - output_type=_REIDENTIFYCONTENTRESPONSE, - serialized_options=b'\202\323\344\223\002l"*/v2/{parent=projects/*}/content:reidentify:\001*Z;"6/v2/{parent=projects/*/locations/*}/content:reidentify:\001*', - ), - _descriptor.MethodDescriptor( - name="ListInfoTypes", - full_name="google.privacy.dlp.v2.DlpService.ListInfoTypes", - index=4, - containing_service=None, - input_type=_LISTINFOTYPESREQUEST, - output_type=_LISTINFOTYPESRESPONSE, - serialized_options=b'\202\323\344\223\0025\022\r/v2/infoTypesZ$\022"/v2/{parent=locations/*}/infoTypes\332A\006parent', - ), - _descriptor.MethodDescriptor( - name="CreateInspectTemplate", - full_name="google.privacy.dlp.v2.DlpService.CreateInspectTemplate", - index=5, - containing_service=None, - input_type=_CREATEINSPECTTEMPLATEREQUEST, - output_type=_INSPECTTEMPLATE, - serialized_options=b'\202\323\344\223\002\334\001"-/v2/{parent=organizations/*}/inspectTemplates:\001*Z>"9/v2/{parent=organizations/*/locations/*}/inspectTemplates:\001*Z-"(/v2/{parent=projects/*}/inspectTemplates:\001*Z9"4/v2/{parent=projects/*/locations/*}/inspectTemplates:\001*\332A\027parent,inspect_template', - ), - _descriptor.MethodDescriptor( - name="UpdateInspectTemplate", - full_name="google.privacy.dlp.v2.DlpService.UpdateInspectTemplate", - index=6, - containing_service=None, - input_type=_UPDATEINSPECTTEMPLATEREQUEST, - output_type=_INSPECTTEMPLATE, - 
serialized_options=b"\202\323\344\223\002\334\0012-/v2/{name=organizations/*/inspectTemplates/*}:\001*Z>29/v2/{name=organizations/*/locations/*/inspectTemplates/*}:\001*Z-2(/v2/{name=projects/*/inspectTemplates/*}:\001*Z924/v2/{name=projects/*/locations/*/inspectTemplates/*}:\001*\332A!name,inspect_template,update_mask", - ), - _descriptor.MethodDescriptor( - name="GetInspectTemplate", - full_name="google.privacy.dlp.v2.DlpService.GetInspectTemplate", - index=7, - containing_service=None, - input_type=_GETINSPECTTEMPLATEREQUEST, - output_type=_INSPECTTEMPLATE, - serialized_options=b"\202\323\344\223\002\320\001\022-/v2/{name=organizations/*/inspectTemplates/*}Z;\0229/v2/{name=organizations/*/locations/*/inspectTemplates/*}Z*\022(/v2/{name=projects/*/inspectTemplates/*}Z6\0224/v2/{name=projects/*/locations/*/inspectTemplates/*}\332A\004name", - ), - _descriptor.MethodDescriptor( - name="ListInspectTemplates", - full_name="google.privacy.dlp.v2.DlpService.ListInspectTemplates", - index=8, - containing_service=None, - input_type=_LISTINSPECTTEMPLATESREQUEST, - output_type=_LISTINSPECTTEMPLATESRESPONSE, - serialized_options=b"\202\323\344\223\002\320\001\022-/v2/{parent=organizations/*}/inspectTemplatesZ;\0229/v2/{parent=organizations/*/locations/*}/inspectTemplatesZ*\022(/v2/{parent=projects/*}/inspectTemplatesZ6\0224/v2/{parent=projects/*/locations/*}/inspectTemplates\332A\006parent", - ), - _descriptor.MethodDescriptor( - name="DeleteInspectTemplate", - full_name="google.privacy.dlp.v2.DlpService.DeleteInspectTemplate", - index=9, - containing_service=None, - input_type=_DELETEINSPECTTEMPLATEREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b"\202\323\344\223\002\320\001*-/v2/{name=organizations/*/inspectTemplates/*}Z;*9/v2/{name=organizations/*/locations/*/inspectTemplates/*}Z**(/v2/{name=projects/*/inspectTemplates/*}Z6*4/v2/{name=projects/*/locations/*/inspectTemplates/*}\332A\004name", - ), - _descriptor.MethodDescriptor( - 
name="CreateDeidentifyTemplate", - full_name="google.privacy.dlp.v2.DlpService.CreateDeidentifyTemplate", - index=10, - containing_service=None, - input_type=_CREATEDEIDENTIFYTEMPLATEREQUEST, - output_type=_DEIDENTIFYTEMPLATE, - serialized_options=b'\202\323\344\223\002\350\001"0/v2/{parent=organizations/*}/deidentifyTemplates:\001*ZA"\022\022*"9/v2/{name=projects/*/locations/*/dlpJobs/*}:hybridInspect:\001*\332A\004name', - ), - _descriptor.MethodDescriptor( - name="FinishDlpJob", - full_name="google.privacy.dlp.v2.DlpService.FinishDlpJob", - index=33, - containing_service=None, - input_type=_FINISHDLPJOBREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b'\202\323\344\223\0027"2/v2/{name=projects/*/locations/*/dlpJobs/*}:finish:\001*', - ), - ], -) -_sym_db.RegisterServiceDescriptor(_DLPSERVICE) - -DESCRIPTOR.services_by_name["DlpService"] = _DLPSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/dlp_v2/proto/dlp_pb2_grpc.py b/google/cloud/dlp_v2/proto/dlp_pb2_grpc.py deleted file mode 100644 index 81a2aed5..00000000 --- a/google/cloud/dlp_v2/proto/dlp_pb2_grpc.py +++ /dev/null @@ -1,721 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc - -from google.cloud.dlp_v2.proto import ( - dlp_pb2 as google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class DlpServiceStub(object): - """The Cloud Data Loss Prevention (DLP) API is a service that allows clients - to detect the presence of Personally Identifiable Information (PII) and other - privacy-sensitive data in user-supplied, unstructured data streams, like text - blocks or images. - The service also includes methods for sensitive data redaction and - scheduling of data scans on Google Cloud Platform based data sets. - - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. 
- """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. - """ - self.InspectContent = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/InspectContent", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.InspectContentRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.InspectContentResponse.FromString, - ) - self.RedactImage = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/RedactImage", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.RedactImageRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.RedactImageResponse.FromString, - ) - self.DeidentifyContent = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/DeidentifyContent", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeidentifyContentRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeidentifyContentResponse.FromString, - ) - self.ReidentifyContent = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/ReidentifyContent", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ReidentifyContentRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ReidentifyContentResponse.FromString, - ) - self.ListInfoTypes = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/ListInfoTypes", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListInfoTypesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListInfoTypesResponse.FromString, - ) - self.CreateInspectTemplate = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/CreateInspectTemplate", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.CreateInspectTemplateRequest.SerializeToString, - 
response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.InspectTemplate.FromString, - ) - self.UpdateInspectTemplate = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.UpdateInspectTemplateRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.InspectTemplate.FromString, - ) - self.GetInspectTemplate = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/GetInspectTemplate", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.GetInspectTemplateRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.InspectTemplate.FromString, - ) - self.ListInspectTemplates = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/ListInspectTemplates", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListInspectTemplatesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListInspectTemplatesResponse.FromString, - ) - self.DeleteInspectTemplate = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeleteInspectTemplateRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.CreateDeidentifyTemplate = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.CreateDeidentifyTemplateRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeidentifyTemplate.FromString, - ) - self.UpdateDeidentifyTemplate = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate", - 
request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.UpdateDeidentifyTemplateRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeidentifyTemplate.FromString, - ) - self.GetDeidentifyTemplate = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.GetDeidentifyTemplateRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeidentifyTemplate.FromString, - ) - self.ListDeidentifyTemplates = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListDeidentifyTemplatesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListDeidentifyTemplatesResponse.FromString, - ) - self.DeleteDeidentifyTemplate = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeleteDeidentifyTemplateRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.CreateJobTrigger = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/CreateJobTrigger", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.CreateJobTriggerRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.JobTrigger.FromString, - ) - self.UpdateJobTrigger = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/UpdateJobTrigger", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.UpdateJobTriggerRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.JobTrigger.FromString, - ) - self.HybridInspectJobTrigger = channel.unary_unary( - 
"/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.HybridInspectJobTriggerRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.HybridInspectResponse.FromString, - ) - self.GetJobTrigger = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/GetJobTrigger", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.GetJobTriggerRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.JobTrigger.FromString, - ) - self.ListJobTriggers = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/ListJobTriggers", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListJobTriggersRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListJobTriggersResponse.FromString, - ) - self.DeleteJobTrigger = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/DeleteJobTrigger", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeleteJobTriggerRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.ActivateJobTrigger = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/ActivateJobTrigger", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ActivateJobTriggerRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DlpJob.FromString, - ) - self.CreateDlpJob = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/CreateDlpJob", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.CreateDlpJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DlpJob.FromString, - ) - self.ListDlpJobs = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/ListDlpJobs", - 
request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListDlpJobsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListDlpJobsResponse.FromString, - ) - self.GetDlpJob = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/GetDlpJob", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.GetDlpJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DlpJob.FromString, - ) - self.DeleteDlpJob = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/DeleteDlpJob", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeleteDlpJobRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.CancelDlpJob = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/CancelDlpJob", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.CancelDlpJobRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.CreateStoredInfoType = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/CreateStoredInfoType", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.CreateStoredInfoTypeRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.StoredInfoType.FromString, - ) - self.UpdateStoredInfoType = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.UpdateStoredInfoTypeRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.StoredInfoType.FromString, - ) - self.GetStoredInfoType = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/GetStoredInfoType", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.GetStoredInfoTypeRequest.SerializeToString, - 
response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.StoredInfoType.FromString, - ) - self.ListStoredInfoTypes = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListStoredInfoTypesRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListStoredInfoTypesResponse.FromString, - ) - self.DeleteStoredInfoType = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeleteStoredInfoTypeRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.HybridInspectDlpJob = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.HybridInspectDlpJobRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.HybridInspectResponse.FromString, - ) - self.FinishDlpJob = channel.unary_unary( - "/google.privacy.dlp.v2.DlpService/FinishDlpJob", - request_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.FinishDlpJobRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - - -class DlpServiceServicer(object): - """The Cloud Data Loss Prevention (DLP) API is a service that allows clients - to detect the presence of Personally Identifiable Information (PII) and other - privacy-sensitive data in user-supplied, unstructured data streams, like text - blocks or images. - The service also includes methods for sensitive data redaction and - scheduling of data scans on Google Cloud Platform based data sets. - - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. 
- """ - - def InspectContent(self, request, context): - """Finds potentially sensitive info in content. - This method has limits on input size, processing time, and output size. - - When no InfoTypes or CustomInfoTypes are specified in this request, the - system will automatically choose what detectors to run. By default this may - be all types, but may change over time as detectors are updated. - - For how to guides, see https://cloud.google.com/dlp/docs/inspecting-images - and https://cloud.google.com/dlp/docs/inspecting-text, - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def RedactImage(self, request, context): - """Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, and output size. - See https://cloud.google.com/dlp/docs/redacting-sensitive-data-images to - learn more. - - When no InfoTypes or CustomInfoTypes are specified in this request, the - system will automatically choose what detectors to run. By default this may - be all types, but may change over time as detectors are updated. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeidentifyContent(self, request, context): - """De-identifies potentially sensitive info from a ContentItem. - This method has limits on input size and output size. - See https://cloud.google.com/dlp/docs/deidentify-sensitive-data to - learn more. - - When no InfoTypes or CustomInfoTypes are specified in this request, the - system will automatically choose what detectors to run. By default this may - be all types, but may change over time as detectors are updated. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ReidentifyContent(self, request, context): - """Re-identifies content that has been de-identified. - See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListInfoTypes(self, request, context): - """Returns a list of the sensitive information types that the DLP API - supports. See https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateInspectTemplate(self, request, context): - """Creates an InspectTemplate for re-using frequently used configuration - for inspecting content, images, and storage. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateInspectTemplate(self, request, context): - """Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetInspectTemplate(self, request, context): - """Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListInspectTemplates(self, request, context): - """Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteInspectTemplate(self, request, context): - """Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateDeidentifyTemplate(self, request, context): - """Creates a DeidentifyTemplate for re-using frequently used configuration - for de-identifying content, images, and storage. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateDeidentifyTemplate(self, request, context): - """Updates the DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetDeidentifyTemplate(self, request, context): - """Gets a DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListDeidentifyTemplates(self, request, context): - """Lists DeidentifyTemplates. 
- See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteDeidentifyTemplate(self, request, context): - """Deletes a DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating-templates-deid to learn - more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateJobTrigger(self, request, context): - """Creates a job trigger to run DLP actions such as scanning storage for - sensitive information on a set schedule. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateJobTrigger(self, request, context): - """Updates a job trigger. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def HybridInspectJobTrigger(self, request, context): - """Inspect hybrid content and store findings to a trigger. The inspection - will be processed asynchronously. To review the findings monitor the - jobs within the trigger. - Early access feature is in a pre-release state and might change or have - limited support. For more information, see - https://cloud.google.com/products#product-launch-stages. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetJobTrigger(self, request, context): - """Gets a job trigger. 
- See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListJobTriggers(self, request, context): - """Lists job triggers. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteJobTrigger(self, request, context): - """Deletes a job trigger. - See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ActivateJobTrigger(self, request, context): - """Activate a job trigger. Causes the immediate execute of a trigger - instead of waiting on the trigger event to occur. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateDlpJob(self, request, context): - """Creates a new job to inspect storage or calculate risk metrics. - See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - - When no InfoTypes or CustomInfoTypes are specified in inspect jobs, the - system will automatically choose what detectors to run. By default this may - be all types, but may change over time as detectors are updated. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListDlpJobs(self, request, context): - """Lists DlpJobs that match the specified filter in the request. 
- See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetDlpJob(self, request, context): - """Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteDlpJob(self, request, context): - """Deletes a long-running DlpJob. This method indicates that the client is - no longer interested in the DlpJob result. The job will be cancelled if - possible. - See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CancelDlpJob(self, request, context): - """Starts asynchronous cancellation on a long-running DlpJob. The server - makes a best effort to cancel the DlpJob, but success is not - guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def CreateStoredInfoType(self, request, context): - """Creates a pre-built stored infoType to be used for inspection. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateStoredInfoType(self, request, context): - """Updates the stored infoType by creating a new version. The existing version - will continue to be used until the new version is ready. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetStoredInfoType(self, request, context): - """Gets a stored infoType. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListStoredInfoTypes(self, request, context): - """Lists stored infoTypes. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteStoredInfoType(self, request, context): - """Deletes a stored infoType. - See https://cloud.google.com/dlp/docs/creating-stored-infotypes to - learn more. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def HybridInspectDlpJob(self, request, context): - """Inspect hybrid content and store findings to a job. - To review the findings inspect the job. Inspection will occur - asynchronously. - Early access feature is in a pre-release state and might change or have - limited support. For more information, see - https://cloud.google.com/products#product-launch-stages. 
- """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def FinishDlpJob(self, request, context): - """Finish a running hybrid DlpJob. Triggers the finalization steps and running - of any enabled actions that have not yet run. - Early access feature is in a pre-release state and might change or have - limited support. For more information, see - https://cloud.google.com/products#product-launch-stages. - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_DlpServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "InspectContent": grpc.unary_unary_rpc_method_handler( - servicer.InspectContent, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.InspectContentRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.InspectContentResponse.SerializeToString, - ), - "RedactImage": grpc.unary_unary_rpc_method_handler( - servicer.RedactImage, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.RedactImageRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.RedactImageResponse.SerializeToString, - ), - "DeidentifyContent": grpc.unary_unary_rpc_method_handler( - servicer.DeidentifyContent, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeidentifyContentRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeidentifyContentResponse.SerializeToString, - ), - "ReidentifyContent": grpc.unary_unary_rpc_method_handler( - servicer.ReidentifyContent, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ReidentifyContentRequest.FromString, - 
response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ReidentifyContentResponse.SerializeToString, - ), - "ListInfoTypes": grpc.unary_unary_rpc_method_handler( - servicer.ListInfoTypes, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListInfoTypesRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListInfoTypesResponse.SerializeToString, - ), - "CreateInspectTemplate": grpc.unary_unary_rpc_method_handler( - servicer.CreateInspectTemplate, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.CreateInspectTemplateRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.InspectTemplate.SerializeToString, - ), - "UpdateInspectTemplate": grpc.unary_unary_rpc_method_handler( - servicer.UpdateInspectTemplate, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.UpdateInspectTemplateRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.InspectTemplate.SerializeToString, - ), - "GetInspectTemplate": grpc.unary_unary_rpc_method_handler( - servicer.GetInspectTemplate, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.GetInspectTemplateRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.InspectTemplate.SerializeToString, - ), - "ListInspectTemplates": grpc.unary_unary_rpc_method_handler( - servicer.ListInspectTemplates, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListInspectTemplatesRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListInspectTemplatesResponse.SerializeToString, - ), - "DeleteInspectTemplate": grpc.unary_unary_rpc_method_handler( - servicer.DeleteInspectTemplate, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeleteInspectTemplateRequest.FromString, - 
response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "CreateDeidentifyTemplate": grpc.unary_unary_rpc_method_handler( - servicer.CreateDeidentifyTemplate, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.CreateDeidentifyTemplateRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeidentifyTemplate.SerializeToString, - ), - "UpdateDeidentifyTemplate": grpc.unary_unary_rpc_method_handler( - servicer.UpdateDeidentifyTemplate, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.UpdateDeidentifyTemplateRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeidentifyTemplate.SerializeToString, - ), - "GetDeidentifyTemplate": grpc.unary_unary_rpc_method_handler( - servicer.GetDeidentifyTemplate, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.GetDeidentifyTemplateRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeidentifyTemplate.SerializeToString, - ), - "ListDeidentifyTemplates": grpc.unary_unary_rpc_method_handler( - servicer.ListDeidentifyTemplates, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListDeidentifyTemplatesRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListDeidentifyTemplatesResponse.SerializeToString, - ), - "DeleteDeidentifyTemplate": grpc.unary_unary_rpc_method_handler( - servicer.DeleteDeidentifyTemplate, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeleteDeidentifyTemplateRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "CreateJobTrigger": grpc.unary_unary_rpc_method_handler( - servicer.CreateJobTrigger, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.CreateJobTriggerRequest.FromString, - 
response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.JobTrigger.SerializeToString, - ), - "UpdateJobTrigger": grpc.unary_unary_rpc_method_handler( - servicer.UpdateJobTrigger, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.UpdateJobTriggerRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.JobTrigger.SerializeToString, - ), - "HybridInspectJobTrigger": grpc.unary_unary_rpc_method_handler( - servicer.HybridInspectJobTrigger, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.HybridInspectJobTriggerRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.HybridInspectResponse.SerializeToString, - ), - "GetJobTrigger": grpc.unary_unary_rpc_method_handler( - servicer.GetJobTrigger, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.GetJobTriggerRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.JobTrigger.SerializeToString, - ), - "ListJobTriggers": grpc.unary_unary_rpc_method_handler( - servicer.ListJobTriggers, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListJobTriggersRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListJobTriggersResponse.SerializeToString, - ), - "DeleteJobTrigger": grpc.unary_unary_rpc_method_handler( - servicer.DeleteJobTrigger, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeleteJobTriggerRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "ActivateJobTrigger": grpc.unary_unary_rpc_method_handler( - servicer.ActivateJobTrigger, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ActivateJobTriggerRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DlpJob.SerializeToString, - ), - "CreateDlpJob": 
grpc.unary_unary_rpc_method_handler( - servicer.CreateDlpJob, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.CreateDlpJobRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DlpJob.SerializeToString, - ), - "ListDlpJobs": grpc.unary_unary_rpc_method_handler( - servicer.ListDlpJobs, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListDlpJobsRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListDlpJobsResponse.SerializeToString, - ), - "GetDlpJob": grpc.unary_unary_rpc_method_handler( - servicer.GetDlpJob, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.GetDlpJobRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DlpJob.SerializeToString, - ), - "DeleteDlpJob": grpc.unary_unary_rpc_method_handler( - servicer.DeleteDlpJob, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeleteDlpJobRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "CancelDlpJob": grpc.unary_unary_rpc_method_handler( - servicer.CancelDlpJob, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.CancelDlpJobRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "CreateStoredInfoType": grpc.unary_unary_rpc_method_handler( - servicer.CreateStoredInfoType, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.CreateStoredInfoTypeRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.StoredInfoType.SerializeToString, - ), - "UpdateStoredInfoType": grpc.unary_unary_rpc_method_handler( - servicer.UpdateStoredInfoType, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.UpdateStoredInfoTypeRequest.FromString, - 
response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.StoredInfoType.SerializeToString, - ), - "GetStoredInfoType": grpc.unary_unary_rpc_method_handler( - servicer.GetStoredInfoType, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.GetStoredInfoTypeRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.StoredInfoType.SerializeToString, - ), - "ListStoredInfoTypes": grpc.unary_unary_rpc_method_handler( - servicer.ListStoredInfoTypes, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListStoredInfoTypesRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.ListStoredInfoTypesResponse.SerializeToString, - ), - "DeleteStoredInfoType": grpc.unary_unary_rpc_method_handler( - servicer.DeleteStoredInfoType, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.DeleteStoredInfoTypeRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "HybridInspectDlpJob": grpc.unary_unary_rpc_method_handler( - servicer.HybridInspectDlpJob, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.HybridInspectDlpJobRequest.FromString, - response_serializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.HybridInspectResponse.SerializeToString, - ), - "FinishDlpJob": grpc.unary_unary_rpc_method_handler( - servicer.FinishDlpJob, - request_deserializer=google_dot_cloud_dot_dlp__v2_dot_proto_dot_dlp__pb2.FinishDlpJobRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.privacy.dlp.v2.DlpService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/google/cloud/dlp_v2/proto/storage.proto b/google/cloud/dlp_v2/proto/storage.proto index 9408d6de..b25cf4fe 100644 --- 
a/google/cloud/dlp_v2/proto/storage.proto +++ b/google/cloud/dlp_v2/proto/storage.proto @@ -18,6 +18,7 @@ package google.privacy.dlp.v2; import "google/api/resource.proto"; import "google/protobuf/timestamp.proto"; +import "google/api/annotations.proto"; option csharp_namespace = "Google.Cloud.Dlp.V2"; option go_package = "google.golang.org/genproto/googleapis/privacy/dlp/v2;dlp"; @@ -128,7 +129,7 @@ message CustomInfoType { // Message for detecting output from deidentification transformations // such as - // [`CryptoReplaceFfxFpeConfig`](/dlp/docs/reference/rest/v2/organizations.deidentifyTemplates#cryptoreplaceffxfpeconfig). + // [`CryptoReplaceFfxFpeConfig`](https://cloud.google.com/dlp/docs/reference/rest/v2/organizations.deidentifyTemplates#cryptoreplaceffxfpeconfig). // These types of transformations are // those that perform pseudonymization, thereby producing a "surrogate" as // output. This should be used in conjunction with a field on the @@ -520,12 +521,17 @@ message StorageConfig { TimespanConfig timespan_config = 6; } -// Definitions of file type groups to scan. +// Definitions of file type groups to scan. New types will be added to this +// list. enum FileType { // Includes all files. FILE_TYPE_UNSPECIFIED = 0; - // Includes all file extensions not covered by text file types. + // Includes all file extensions not covered by another entry. Binary + // scanning attempts to convert the content of the file to utf_8 to scan + // the file. + // If you wish to avoid this fall back, specify one or more of the other + // FileType's in your storage scan. BINARY_FILE = 1; // Included file extensions: @@ -542,10 +548,12 @@ enum FileType { // Image inspection is restricted to 'global', 'us', 'asia', and 'europe'. IMAGE = 3; + // Word files >30 MB will be scanned as binary files. // Included file extensions: // docx, dotx, docm, dotm WORD = 5; + // PDF files >30 MB will be scanned as binary files. 
// Included file extensions: // pdf PDF = 6; @@ -553,6 +561,14 @@ enum FileType { // Included file extensions: // avro AVRO = 7; + + // Included file extensions: + // csv + CSV = 8; + + // Included file extensions: + // tsv + TSV = 9; } // Configuration to control jobs where the content being inspected is outside diff --git a/google/cloud/dlp_v2/proto/storage_pb2.py b/google/cloud/dlp_v2/proto/storage_pb2.py deleted file mode 100644 index 151dd3db..00000000 --- a/google/cloud/dlp_v2/proto/storage_pb2.py +++ /dev/null @@ -1,3552 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/dlp_v2/proto/storage.proto - -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/dlp_v2/proto/storage.proto", - package="google.privacy.dlp.v2", - syntax="proto3", - serialized_options=b"\n\031com.google.privacy.dlp.v2B\nDlpStorageP\001Z8google.golang.org/genproto/googleapis/privacy/dlp/v2;dlp\252\002\023Google.Cloud.Dlp.V2\312\002\023Google\\Cloud\\Dlp\\V2\352\002\026Google::Cloud::Dlp::V2", - serialized_pb=b'\n\'google/cloud/dlp_v2/proto/storage.proto\x12\x15google.privacy.dlp.v2\x1a\x19google/api/resource.proto\x1a\x1fgoogle/protobuf/timestamp.proto"\x18\n\x08InfoType\x12\x0c\n\x04name\x18\x01 \x01(\t"K\n\nStoredType\x12\x0c\n\x04name\x18\x01 \x01(\t\x12/\n\x0b\x63reate_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp"\xc8\x0b\n\x0e\x43ustomInfoType\x12\x32\n\tinfo_type\x18\x01 
\x01(\x0b\x32\x1f.google.privacy.dlp.v2.InfoType\x12\x35\n\nlikelihood\x18\x06 \x01(\x0e\x32!.google.privacy.dlp.v2.Likelihood\x12\x46\n\ndictionary\x18\x02 \x01(\x0b\x32\x30.google.privacy.dlp.v2.CustomInfoType.DictionaryH\x00\x12<\n\x05regex\x18\x03 \x01(\x0b\x32+.google.privacy.dlp.v2.CustomInfoType.RegexH\x00\x12M\n\x0esurrogate_type\x18\x04 \x01(\x0b\x32\x33.google.privacy.dlp.v2.CustomInfoType.SurrogateTypeH\x00\x12\x38\n\x0bstored_type\x18\x05 \x01(\x0b\x32!.google.privacy.dlp.v2.StoredTypeH\x00\x12L\n\x0f\x64\x65tection_rules\x18\x07 \x03(\x0b\x32\x33.google.privacy.dlp.v2.CustomInfoType.DetectionRule\x12K\n\x0e\x65xclusion_type\x18\x08 \x01(\x0e\x32\x33.google.privacy.dlp.v2.CustomInfoType.ExclusionType\x1a\xc8\x01\n\nDictionary\x12N\n\tword_list\x18\x01 \x01(\x0b\x32\x39.google.privacy.dlp.v2.CustomInfoType.Dictionary.WordListH\x00\x12\x45\n\x12\x63loud_storage_path\x18\x03 \x01(\x0b\x32\'.google.privacy.dlp.v2.CloudStoragePathH\x00\x1a\x19\n\x08WordList\x12\r\n\x05words\x18\x01 \x03(\tB\x08\n\x06source\x1a/\n\x05Regex\x12\x0f\n\x07pattern\x18\x01 \x01(\t\x12\x15\n\rgroup_indexes\x18\x02 \x03(\x05\x1a\x0f\n\rSurrogateType\x1a\xbe\x04\n\rDetectionRule\x12W\n\x0chotword_rule\x18\x01 \x01(\x0b\x32?.google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRuleH\x00\x1a\x38\n\tProximity\x12\x15\n\rwindow_before\x18\x01 \x01(\x05\x12\x14\n\x0cwindow_after\x18\x02 \x01(\x05\x1a\x82\x01\n\x14LikelihoodAdjustment\x12=\n\x10\x66ixed_likelihood\x18\x01 \x01(\x0e\x32!.google.privacy.dlp.v2.LikelihoodH\x00\x12\x1d\n\x13relative_likelihood\x18\x02 \x01(\x05H\x00\x42\x0c\n\nadjustment\x1a\x8c\x02\n\x0bHotwordRule\x12\x42\n\rhotword_regex\x18\x01 \x01(\x0b\x32+.google.privacy.dlp.v2.CustomInfoType.Regex\x12P\n\tproximity\x18\x02 \x01(\x0b\x32=.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity\x12g\n\x15likelihood_adjustment\x18\x03 
\x01(\x0b\x32H.google.privacy.dlp.v2.CustomInfoType.DetectionRule.LikelihoodAdjustmentB\x06\n\x04type"K\n\rExclusionType\x12\x1e\n\x1a\x45XCLUSION_TYPE_UNSPECIFIED\x10\x00\x12\x1a\n\x16\x45XCLUSION_TYPE_EXCLUDE\x10\x01\x42\x06\n\x04type"\x17\n\x07\x46ieldId\x12\x0c\n\x04name\x18\x01 \x01(\t"7\n\x0bPartitionId\x12\x12\n\nproject_id\x18\x02 \x01(\t\x12\x14\n\x0cnamespace_id\x18\x04 \x01(\t"\x1e\n\x0eKindExpression\x12\x0c\n\x04name\x18\x01 \x01(\t"\x81\x01\n\x10\x44\x61tastoreOptions\x12\x38\n\x0cpartition_id\x18\x01 \x01(\x0b\x32".google.privacy.dlp.v2.PartitionId\x12\x33\n\x04kind\x18\x02 \x01(\x0b\x32%.google.privacy.dlp.v2.KindExpression"]\n\x18\x43loudStorageRegexFileSet\x12\x13\n\x0b\x62ucket_name\x18\x01 \x01(\t\x12\x15\n\rinclude_regex\x18\x02 \x03(\t\x12\x15\n\rexclude_regex\x18\x03 \x03(\t"\xec\x03\n\x13\x43loudStorageOptions\x12\x44\n\x08\x66ile_set\x18\x01 \x01(\x0b\x32\x32.google.privacy.dlp.v2.CloudStorageOptions.FileSet\x12\x1c\n\x14\x62ytes_limit_per_file\x18\x04 \x01(\x03\x12$\n\x1c\x62ytes_limit_per_file_percent\x18\x08 \x01(\x05\x12\x33\n\nfile_types\x18\x05 \x03(\x0e\x32\x1f.google.privacy.dlp.v2.FileType\x12N\n\rsample_method\x18\x06 \x01(\x0e\x32\x37.google.privacy.dlp.v2.CloudStorageOptions.SampleMethod\x12\x1b\n\x13\x66iles_limit_percent\x18\x07 \x01(\x05\x1a_\n\x07\x46ileSet\x12\x0b\n\x03url\x18\x01 \x01(\t\x12G\n\x0eregex_file_set\x18\x02 \x01(\x0b\x32/.google.privacy.dlp.v2.CloudStorageRegexFileSet"H\n\x0cSampleMethod\x12\x1d\n\x19SAMPLE_METHOD_UNSPECIFIED\x10\x00\x12\x07\n\x03TOP\x10\x01\x12\x10\n\x0cRANDOM_START\x10\x02""\n\x13\x43loudStorageFileSet\x12\x0b\n\x03url\x18\x01 \x01(\t" \n\x10\x43loudStoragePath\x12\x0c\n\x04path\x18\x01 \x01(\t"\x8b\x03\n\x0f\x42igQueryOptions\x12=\n\x0ftable_reference\x18\x01 \x01(\x0b\x32$.google.privacy.dlp.v2.BigQueryTable\x12:\n\x12identifying_fields\x18\x02 \x03(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x12\x12\n\nrows_limit\x18\x03 \x01(\x03\x12\x1a\n\x12rows_limit_percent\x18\x06 
\x01(\x05\x12J\n\rsample_method\x18\x04 \x01(\x0e\x32\x33.google.privacy.dlp.v2.BigQueryOptions.SampleMethod\x12\x37\n\x0f\x65xcluded_fields\x18\x05 \x03(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId"H\n\x0cSampleMethod\x12\x1d\n\x19SAMPLE_METHOD_UNSPECIFIED\x10\x00\x12\x07\n\x03TOP\x10\x01\x12\x10\n\x0cRANDOM_START\x10\x02"\xda\x04\n\rStorageConfig\x12\x44\n\x11\x64\x61tastore_options\x18\x02 \x01(\x0b\x32\'.google.privacy.dlp.v2.DatastoreOptionsH\x00\x12K\n\x15\x63loud_storage_options\x18\x03 \x01(\x0b\x32*.google.privacy.dlp.v2.CloudStorageOptionsH\x00\x12\x43\n\x11\x62ig_query_options\x18\x04 \x01(\x0b\x32&.google.privacy.dlp.v2.BigQueryOptionsH\x00\x12>\n\x0ehybrid_options\x18\t \x01(\x0b\x32$.google.privacy.dlp.v2.HybridOptionsH\x00\x12L\n\x0ftimespan_config\x18\x06 \x01(\x0b\x32\x33.google.privacy.dlp.v2.StorageConfig.TimespanConfig\x1a\xda\x01\n\x0eTimespanConfig\x12.\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12\x37\n\x0ftimestamp_field\x18\x03 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId\x12\x31\n)enable_auto_population_of_timespan_config\x18\x04 \x01(\x08\x42\x06\n\x04type"\xf6\x01\n\rHybridOptions\x12\x13\n\x0b\x64\x65scription\x18\x01 \x01(\t\x12#\n\x1brequired_finding_label_keys\x18\x02 \x03(\t\x12@\n\x06labels\x18\x03 \x03(\x0b\x32\x30.google.privacy.dlp.v2.HybridOptions.LabelsEntry\x12:\n\rtable_options\x18\x04 \x01(\x0b\x32#.google.privacy.dlp.v2.TableOptions\x1a-\n\x0bLabelsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"`\n\x0b\x42igQueryKey\x12=\n\x0ftable_reference\x18\x01 \x01(\x0b\x32$.google.privacy.dlp.v2.BigQueryTable\x12\x12\n\nrow_number\x18\x02 \x01(\x03">\n\x0c\x44\x61tastoreKey\x12.\n\nentity_key\x18\x01 \x01(\x0b\x32\x1a.google.privacy.dlp.v2.Key"\xbb\x01\n\x03Key\x12\x38\n\x0cpartition_id\x18\x01 \x01(\x0b\x32".google.privacy.dlp.v2.PartitionId\x12\x34\n\x04path\x18\x02 
\x03(\x0b\x32&.google.privacy.dlp.v2.Key.PathElement\x1a\x44\n\x0bPathElement\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x02id\x18\x02 \x01(\x03H\x00\x12\x0e\n\x04name\x18\x03 \x01(\tH\x00\x42\t\n\x07id_type"\xa1\x01\n\tRecordKey\x12<\n\rdatastore_key\x18\x02 \x01(\x0b\x32#.google.privacy.dlp.v2.DatastoreKeyH\x00\x12;\n\rbig_query_key\x18\x03 \x01(\x0b\x32".google.privacy.dlp.v2.BigQueryKeyH\x00\x12\x11\n\tid_values\x18\x05 \x03(\tB\x06\n\x04type"I\n\rBigQueryTable\x12\x12\n\nproject_id\x18\x01 \x01(\t\x12\x12\n\ndataset_id\x18\x02 \x01(\t\x12\x10\n\x08table_id\x18\x03 \x01(\t"s\n\rBigQueryField\x12\x33\n\x05table\x18\x01 \x01(\x0b\x32$.google.privacy.dlp.v2.BigQueryTable\x12-\n\x05\x66ield\x18\x02 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId"9\n\x08\x45ntityId\x12-\n\x05\x66ield\x18\x01 \x01(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId"J\n\x0cTableOptions\x12:\n\x12identifying_fields\x18\x01 \x03(\x0b\x32\x1e.google.privacy.dlp.v2.FieldId*t\n\nLikelihood\x12\x1a\n\x16LIKELIHOOD_UNSPECIFIED\x10\x00\x12\x11\n\rVERY_UNLIKELY\x10\x01\x12\x0c\n\x08UNLIKELY\x10\x02\x12\x0c\n\x08POSSIBLE\x10\x03\x12\n\n\x06LIKELY\x10\x04\x12\x0f\n\x0bVERY_LIKELY\x10\x05*m\n\x08\x46ileType\x12\x19\n\x15\x46ILE_TYPE_UNSPECIFIED\x10\x00\x12\x0f\n\x0b\x42INARY_FILE\x10\x01\x12\r\n\tTEXT_FILE\x10\x02\x12\t\n\x05IMAGE\x10\x03\x12\x08\n\x04WORD\x10\x05\x12\x07\n\x03PDF\x10\x06\x12\x08\n\x04\x41VRO\x10\x07\x42\xa8\x01\n\x19\x63om.google.privacy.dlp.v2B\nDlpStorageP\x01Z8google.golang.org/genproto/googleapis/privacy/dlp/v2;dlp\xaa\x02\x13Google.Cloud.Dlp.V2\xca\x02\x13Google\\Cloud\\Dlp\\V2\xea\x02\x16Google::Cloud::Dlp::V2b\x06proto3', - dependencies=[ - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, - ], -) - -_LIKELIHOOD = _descriptor.EnumDescriptor( - name="Likelihood", - full_name="google.privacy.dlp.v2.Likelihood", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="LIKELIHOOD_UNSPECIFIED", - index=0, 
- number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="VERY_UNLIKELY", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="UNLIKELY", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="POSSIBLE", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="LIKELY", index=4, number=4, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="VERY_LIKELY", index=5, number=5, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4713, - serialized_end=4829, -) -_sym_db.RegisterEnumDescriptor(_LIKELIHOOD) - -Likelihood = enum_type_wrapper.EnumTypeWrapper(_LIKELIHOOD) -_FILETYPE = _descriptor.EnumDescriptor( - name="FileType", - full_name="google.privacy.dlp.v2.FileType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="FILE_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="BINARY_FILE", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="TEXT_FILE", index=2, number=2, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="IMAGE", index=3, number=3, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="WORD", index=4, number=5, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="PDF", index=5, number=6, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="AVRO", index=6, number=7, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=4831, - serialized_end=4940, -) -_sym_db.RegisterEnumDescriptor(_FILETYPE) - -FileType = 
enum_type_wrapper.EnumTypeWrapper(_FILETYPE) -LIKELIHOOD_UNSPECIFIED = 0 -VERY_UNLIKELY = 1 -UNLIKELY = 2 -POSSIBLE = 3 -LIKELY = 4 -VERY_LIKELY = 5 -FILE_TYPE_UNSPECIFIED = 0 -BINARY_FILE = 1 -TEXT_FILE = 2 -IMAGE = 3 -WORD = 5 -PDF = 6 -AVRO = 7 - - -_CUSTOMINFOTYPE_EXCLUSIONTYPE = _descriptor.EnumDescriptor( - name="ExclusionType", - full_name="google.privacy.dlp.v2.CustomInfoType.ExclusionType", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="EXCLUSION_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="EXCLUSION_TYPE_EXCLUDE", - index=1, - number=1, - serialized_options=None, - type=None, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1627, - serialized_end=1702, -) -_sym_db.RegisterEnumDescriptor(_CUSTOMINFOTYPE_EXCLUSIONTYPE) - -_CLOUDSTORAGEOPTIONS_SAMPLEMETHOD = _descriptor.EnumDescriptor( - name="SampleMethod", - full_name="google.privacy.dlp.v2.CloudStorageOptions.SampleMethod", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="SAMPLE_METHOD_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - _descriptor.EnumValueDescriptor( - name="TOP", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RANDOM_START", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2474, - serialized_end=2546, -) -_sym_db.RegisterEnumDescriptor(_CLOUDSTORAGEOPTIONS_SAMPLEMETHOD) - -_BIGQUERYOPTIONS_SAMPLEMETHOD = _descriptor.EnumDescriptor( - name="SampleMethod", - full_name="google.privacy.dlp.v2.BigQueryOptions.SampleMethod", - filename=None, - file=DESCRIPTOR, - values=[ - _descriptor.EnumValueDescriptor( - name="SAMPLE_METHOD_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - ), - 
_descriptor.EnumValueDescriptor( - name="TOP", index=1, number=1, serialized_options=None, type=None - ), - _descriptor.EnumValueDescriptor( - name="RANDOM_START", index=2, number=2, serialized_options=None, type=None - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=2474, - serialized_end=2546, -) -_sym_db.RegisterEnumDescriptor(_BIGQUERYOPTIONS_SAMPLEMETHOD) - - -_INFOTYPE = _descriptor.Descriptor( - name="InfoType", - full_name="google.privacy.dlp.v2.InfoType", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.privacy.dlp.v2.InfoType.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=126, - serialized_end=150, -) - - -_STOREDTYPE = _descriptor.Descriptor( - name="StoredType", - full_name="google.privacy.dlp.v2.StoredType", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.privacy.dlp.v2.StoredType.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="create_time", - full_name="google.privacy.dlp.v2.StoredType.create_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=152, - serialized_end=227, -) - - -_CUSTOMINFOTYPE_DICTIONARY_WORDLIST = _descriptor.Descriptor( - name="WordList", - full_name="google.privacy.dlp.v2.CustomInfoType.Dictionary.WordList", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="words", - full_name="google.privacy.dlp.v2.CustomInfoType.Dictionary.WordList.words", - index=0, - number=1, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=947, - serialized_end=972, -) - -_CUSTOMINFOTYPE_DICTIONARY = _descriptor.Descriptor( - name="Dictionary", - full_name="google.privacy.dlp.v2.CustomInfoType.Dictionary", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="word_list", - full_name="google.privacy.dlp.v2.CustomInfoType.Dictionary.word_list", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cloud_storage_path", - full_name="google.privacy.dlp.v2.CustomInfoType.Dictionary.cloud_storage_path", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - 
default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_CUSTOMINFOTYPE_DICTIONARY_WORDLIST], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="source", - full_name="google.privacy.dlp.v2.CustomInfoType.Dictionary.source", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=782, - serialized_end=982, -) - -_CUSTOMINFOTYPE_REGEX = _descriptor.Descriptor( - name="Regex", - full_name="google.privacy.dlp.v2.CustomInfoType.Regex", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="pattern", - full_name="google.privacy.dlp.v2.CustomInfoType.Regex.pattern", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="group_indexes", - full_name="google.privacy.dlp.v2.CustomInfoType.Regex.group_indexes", - index=1, - number=2, - type=5, - cpp_type=1, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=984, - serialized_end=1031, -) - -_CUSTOMINFOTYPE_SURROGATETYPE = _descriptor.Descriptor( - name="SurrogateType", - full_name="google.privacy.dlp.v2.CustomInfoType.SurrogateType", - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - fields=[], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1033, - serialized_end=1048, -) - -_CUSTOMINFOTYPE_DETECTIONRULE_PROXIMITY = _descriptor.Descriptor( - name="Proximity", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="window_before", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity.window_before", - index=0, - number=1, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="window_after", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity.window_after", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1157, - serialized_end=1213, -) - -_CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT = _descriptor.Descriptor( - name="LikelihoodAdjustment", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule.LikelihoodAdjustment", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="fixed_likelihood", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule.LikelihoodAdjustment.fixed_likelihood", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, 
- has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="relative_likelihood", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule.LikelihoodAdjustment.relative_likelihood", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="adjustment", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule.LikelihoodAdjustment.adjustment", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=1216, - serialized_end=1346, -) - -_CUSTOMINFOTYPE_DETECTIONRULE_HOTWORDRULE = _descriptor.Descriptor( - name="HotwordRule", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="hotword_regex", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule.hotword_regex", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="proximity", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule.proximity", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="likelihood_adjustment", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule.likelihood_adjustment", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1349, - serialized_end=1617, -) - -_CUSTOMINFOTYPE_DETECTIONRULE = _descriptor.Descriptor( - name="DetectionRule", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="hotword_rule", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule.hotword_rule", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[ - _CUSTOMINFOTYPE_DETECTIONRULE_PROXIMITY, - _CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT, - _CUSTOMINFOTYPE_DETECTIONRULE_HOTWORDRULE, - ], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="google.privacy.dlp.v2.CustomInfoType.DetectionRule.type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=1051, - serialized_end=1625, -) - -_CUSTOMINFOTYPE = _descriptor.Descriptor( - 
name="CustomInfoType", - full_name="google.privacy.dlp.v2.CustomInfoType", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="info_type", - full_name="google.privacy.dlp.v2.CustomInfoType.info_type", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="likelihood", - full_name="google.privacy.dlp.v2.CustomInfoType.likelihood", - index=1, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dictionary", - full_name="google.privacy.dlp.v2.CustomInfoType.dictionary", - index=2, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="regex", - full_name="google.privacy.dlp.v2.CustomInfoType.regex", - index=3, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="surrogate_type", - full_name="google.privacy.dlp.v2.CustomInfoType.surrogate_type", - index=4, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="stored_type", - full_name="google.privacy.dlp.v2.CustomInfoType.stored_type", - index=5, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="detection_rules", - full_name="google.privacy.dlp.v2.CustomInfoType.detection_rules", - index=6, - number=7, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exclusion_type", - full_name="google.privacy.dlp.v2.CustomInfoType.exclusion_type", - index=7, - number=8, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[ - _CUSTOMINFOTYPE_DICTIONARY, - _CUSTOMINFOTYPE_REGEX, - _CUSTOMINFOTYPE_SURROGATETYPE, - _CUSTOMINFOTYPE_DETECTIONRULE, - ], - enum_types=[_CUSTOMINFOTYPE_EXCLUSIONTYPE], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="google.privacy.dlp.v2.CustomInfoType.type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=230, - serialized_end=1710, -) - - -_FIELDID = _descriptor.Descriptor( - name="FieldId", - full_name="google.privacy.dlp.v2.FieldId", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - 
full_name="google.privacy.dlp.v2.FieldId.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1712, - serialized_end=1735, -) - - -_PARTITIONID = _descriptor.Descriptor( - name="PartitionId", - full_name="google.privacy.dlp.v2.PartitionId", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.privacy.dlp.v2.PartitionId.project_id", - index=0, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="namespace_id", - full_name="google.privacy.dlp.v2.PartitionId.namespace_id", - index=1, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1737, - serialized_end=1792, -) - - -_KINDEXPRESSION = _descriptor.Descriptor( - name="KindExpression", - full_name="google.privacy.dlp.v2.KindExpression", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="name", - 
full_name="google.privacy.dlp.v2.KindExpression.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1794, - serialized_end=1824, -) - - -_DATASTOREOPTIONS = _descriptor.Descriptor( - name="DatastoreOptions", - full_name="google.privacy.dlp.v2.DatastoreOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="partition_id", - full_name="google.privacy.dlp.v2.DatastoreOptions.partition_id", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="kind", - full_name="google.privacy.dlp.v2.DatastoreOptions.kind", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1827, - serialized_end=1956, -) - - -_CLOUDSTORAGEREGEXFILESET = _descriptor.Descriptor( - name="CloudStorageRegexFileSet", - full_name="google.privacy.dlp.v2.CloudStorageRegexFileSet", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - 
name="bucket_name", - full_name="google.privacy.dlp.v2.CloudStorageRegexFileSet.bucket_name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="include_regex", - full_name="google.privacy.dlp.v2.CloudStorageRegexFileSet.include_regex", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="exclude_regex", - full_name="google.privacy.dlp.v2.CloudStorageRegexFileSet.exclude_regex", - index=2, - number=3, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1958, - serialized_end=2051, -) - - -_CLOUDSTORAGEOPTIONS_FILESET = _descriptor.Descriptor( - name="FileSet", - full_name="google.privacy.dlp.v2.CloudStorageOptions.FileSet", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="url", - full_name="google.privacy.dlp.v2.CloudStorageOptions.FileSet.url", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - 
_descriptor.FieldDescriptor( - name="regex_file_set", - full_name="google.privacy.dlp.v2.CloudStorageOptions.FileSet.regex_file_set", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2377, - serialized_end=2472, -) - -_CLOUDSTORAGEOPTIONS = _descriptor.Descriptor( - name="CloudStorageOptions", - full_name="google.privacy.dlp.v2.CloudStorageOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="file_set", - full_name="google.privacy.dlp.v2.CloudStorageOptions.file_set", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bytes_limit_per_file", - full_name="google.privacy.dlp.v2.CloudStorageOptions.bytes_limit_per_file", - index=1, - number=4, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="bytes_limit_per_file_percent", - full_name="google.privacy.dlp.v2.CloudStorageOptions.bytes_limit_per_file_percent", - index=2, - number=8, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - 
serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="file_types", - full_name="google.privacy.dlp.v2.CloudStorageOptions.file_types", - index=3, - number=5, - type=14, - cpp_type=8, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="sample_method", - full_name="google.privacy.dlp.v2.CloudStorageOptions.sample_method", - index=4, - number=6, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="files_limit_percent", - full_name="google.privacy.dlp.v2.CloudStorageOptions.files_limit_percent", - index=5, - number=7, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_CLOUDSTORAGEOPTIONS_FILESET], - enum_types=[_CLOUDSTORAGEOPTIONS_SAMPLEMETHOD], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2054, - serialized_end=2546, -) - - -_CLOUDSTORAGEFILESET = _descriptor.Descriptor( - name="CloudStorageFileSet", - full_name="google.privacy.dlp.v2.CloudStorageFileSet", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="url", - full_name="google.privacy.dlp.v2.CloudStorageFileSet.url", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - 
containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2548, - serialized_end=2582, -) - - -_CLOUDSTORAGEPATH = _descriptor.Descriptor( - name="CloudStoragePath", - full_name="google.privacy.dlp.v2.CloudStoragePath", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="path", - full_name="google.privacy.dlp.v2.CloudStoragePath.path", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2584, - serialized_end=2616, -) - - -_BIGQUERYOPTIONS = _descriptor.Descriptor( - name="BigQueryOptions", - full_name="google.privacy.dlp.v2.BigQueryOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="table_reference", - full_name="google.privacy.dlp.v2.BigQueryOptions.table_reference", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="identifying_fields", - full_name="google.privacy.dlp.v2.BigQueryOptions.identifying_fields", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="rows_limit", - full_name="google.privacy.dlp.v2.BigQueryOptions.rows_limit", - index=2, - number=3, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="rows_limit_percent", - full_name="google.privacy.dlp.v2.BigQueryOptions.rows_limit_percent", - index=3, - number=6, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="sample_method", - full_name="google.privacy.dlp.v2.BigQueryOptions.sample_method", - index=4, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="excluded_fields", - full_name="google.privacy.dlp.v2.BigQueryOptions.excluded_fields", - index=5, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_BIGQUERYOPTIONS_SAMPLEMETHOD], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=2619, - serialized_end=3014, -) - - -_STORAGECONFIG_TIMESPANCONFIG = _descriptor.Descriptor( - 
name="TimespanConfig", - full_name="google.privacy.dlp.v2.StorageConfig.TimespanConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="start_time", - full_name="google.privacy.dlp.v2.StorageConfig.TimespanConfig.start_time", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="end_time", - full_name="google.privacy.dlp.v2.StorageConfig.TimespanConfig.end_time", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="timestamp_field", - full_name="google.privacy.dlp.v2.StorageConfig.TimespanConfig.timestamp_field", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="enable_auto_population_of_timespan_config", - full_name="google.privacy.dlp.v2.StorageConfig.TimespanConfig.enable_auto_population_of_timespan_config", - index=3, - number=4, - type=8, - cpp_type=7, - label=1, - has_default_value=False, - default_value=False, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - 
serialized_start=3393, - serialized_end=3611, -) - -_STORAGECONFIG = _descriptor.Descriptor( - name="StorageConfig", - full_name="google.privacy.dlp.v2.StorageConfig", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="datastore_options", - full_name="google.privacy.dlp.v2.StorageConfig.datastore_options", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="cloud_storage_options", - full_name="google.privacy.dlp.v2.StorageConfig.cloud_storage_options", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="big_query_options", - full_name="google.privacy.dlp.v2.StorageConfig.big_query_options", - index=2, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="hybrid_options", - full_name="google.privacy.dlp.v2.StorageConfig.hybrid_options", - index=3, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="timespan_config", - full_name="google.privacy.dlp.v2.StorageConfig.timespan_config", - index=4, - number=6, - type=11, - 
cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_STORAGECONFIG_TIMESPANCONFIG], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="google.privacy.dlp.v2.StorageConfig.type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=3017, - serialized_end=3619, -) - - -_HYBRIDOPTIONS_LABELSENTRY = _descriptor.Descriptor( - name="LabelsEntry", - full_name="google.privacy.dlp.v2.HybridOptions.LabelsEntry", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="key", - full_name="google.privacy.dlp.v2.HybridOptions.LabelsEntry.key", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.privacy.dlp.v2.HybridOptions.LabelsEntry.value", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"8\001", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3823, - serialized_end=3868, -) - -_HYBRIDOPTIONS = _descriptor.Descriptor( - name="HybridOptions", - full_name="google.privacy.dlp.v2.HybridOptions", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="description", - full_name="google.privacy.dlp.v2.HybridOptions.description", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="required_finding_label_keys", - full_name="google.privacy.dlp.v2.HybridOptions.required_finding_label_keys", - index=1, - number=2, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="labels", - full_name="google.privacy.dlp.v2.HybridOptions.labels", - index=2, - number=3, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="table_options", - full_name="google.privacy.dlp.v2.HybridOptions.table_options", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_HYBRIDOPTIONS_LABELSENTRY], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3622, - serialized_end=3868, -) - - -_BIGQUERYKEY = _descriptor.Descriptor( - name="BigQueryKey", - full_name="google.privacy.dlp.v2.BigQueryKey", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="table_reference", - full_name="google.privacy.dlp.v2.BigQueryKey.table_reference", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="row_number", - full_name="google.privacy.dlp.v2.BigQueryKey.row_number", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3870, - serialized_end=3966, -) - - -_DATASTOREKEY = _descriptor.Descriptor( - name="DatastoreKey", - full_name="google.privacy.dlp.v2.DatastoreKey", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="entity_key", - full_name="google.privacy.dlp.v2.DatastoreKey.entity_key", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=3968, - serialized_end=4030, -) - - -_KEY_PATHELEMENT = _descriptor.Descriptor( - name="PathElement", - full_name="google.privacy.dlp.v2.Key.PathElement", - filename=None, - file=DESCRIPTOR, - containing_type=None, 
- fields=[ - _descriptor.FieldDescriptor( - name="kind", - full_name="google.privacy.dlp.v2.Key.PathElement.kind", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="id", - full_name="google.privacy.dlp.v2.Key.PathElement.id", - index=1, - number=2, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="name", - full_name="google.privacy.dlp.v2.Key.PathElement.name", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="id_type", - full_name="google.privacy.dlp.v2.Key.PathElement.id_type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=4152, - serialized_end=4220, -) - -_KEY = _descriptor.Descriptor( - name="Key", - full_name="google.privacy.dlp.v2.Key", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="partition_id", - full_name="google.privacy.dlp.v2.Key.partition_id", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - 
extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="path", - full_name="google.privacy.dlp.v2.Key.path", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[_KEY_PATHELEMENT], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4033, - serialized_end=4220, -) - - -_RECORDKEY = _descriptor.Descriptor( - name="RecordKey", - full_name="google.privacy.dlp.v2.RecordKey", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="datastore_key", - full_name="google.privacy.dlp.v2.RecordKey.datastore_key", - index=0, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="big_query_key", - full_name="google.privacy.dlp.v2.RecordKey.big_query_key", - index=1, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="id_values", - full_name="google.privacy.dlp.v2.RecordKey.id_values", - index=2, - number=5, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - 
extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="type", - full_name="google.privacy.dlp.v2.RecordKey.type", - index=0, - containing_type=None, - fields=[], - ) - ], - serialized_start=4223, - serialized_end=4384, -) - - -_BIGQUERYTABLE = _descriptor.Descriptor( - name="BigQueryTable", - full_name="google.privacy.dlp.v2.BigQueryTable", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="project_id", - full_name="google.privacy.dlp.v2.BigQueryTable.project_id", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="dataset_id", - full_name="google.privacy.dlp.v2.BigQueryTable.dataset_id", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="table_id", - full_name="google.privacy.dlp.v2.BigQueryTable.table_id", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4386, - serialized_end=4459, -) - - -_BIGQUERYFIELD = _descriptor.Descriptor( - 
name="BigQueryField", - full_name="google.privacy.dlp.v2.BigQueryField", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="table", - full_name="google.privacy.dlp.v2.BigQueryField.table", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - _descriptor.FieldDescriptor( - name="field", - full_name="google.privacy.dlp.v2.BigQueryField.field", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4461, - serialized_end=4576, -) - - -_ENTITYID = _descriptor.Descriptor( - name="EntityId", - full_name="google.privacy.dlp.v2.EntityId", - filename=None, - file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="field", - full_name="google.privacy.dlp.v2.EntityId.field", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4578, - serialized_end=4635, -) - - -_TABLEOPTIONS = _descriptor.Descriptor( - name="TableOptions", - full_name="google.privacy.dlp.v2.TableOptions", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - fields=[ - _descriptor.FieldDescriptor( - name="identifying_fields", - full_name="google.privacy.dlp.v2.TableOptions.identifying_fields", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - ) - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=4637, - serialized_end=4711, -) - -_STOREDTYPE.fields_by_name[ - "create_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_CUSTOMINFOTYPE_DICTIONARY_WORDLIST.containing_type = _CUSTOMINFOTYPE_DICTIONARY -_CUSTOMINFOTYPE_DICTIONARY.fields_by_name[ - "word_list" -].message_type = _CUSTOMINFOTYPE_DICTIONARY_WORDLIST -_CUSTOMINFOTYPE_DICTIONARY.fields_by_name[ - "cloud_storage_path" -].message_type = _CLOUDSTORAGEPATH -_CUSTOMINFOTYPE_DICTIONARY.containing_type = _CUSTOMINFOTYPE -_CUSTOMINFOTYPE_DICTIONARY.oneofs_by_name["source"].fields.append( - _CUSTOMINFOTYPE_DICTIONARY.fields_by_name["word_list"] -) -_CUSTOMINFOTYPE_DICTIONARY.fields_by_name[ - "word_list" -].containing_oneof = _CUSTOMINFOTYPE_DICTIONARY.oneofs_by_name["source"] -_CUSTOMINFOTYPE_DICTIONARY.oneofs_by_name["source"].fields.append( - _CUSTOMINFOTYPE_DICTIONARY.fields_by_name["cloud_storage_path"] -) -_CUSTOMINFOTYPE_DICTIONARY.fields_by_name[ - "cloud_storage_path" -].containing_oneof = _CUSTOMINFOTYPE_DICTIONARY.oneofs_by_name["source"] -_CUSTOMINFOTYPE_REGEX.containing_type = _CUSTOMINFOTYPE -_CUSTOMINFOTYPE_SURROGATETYPE.containing_type = _CUSTOMINFOTYPE -_CUSTOMINFOTYPE_DETECTIONRULE_PROXIMITY.containing_type = _CUSTOMINFOTYPE_DETECTIONRULE -_CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT.fields_by_name[ - "fixed_likelihood" -].enum_type = _LIKELIHOOD 
-_CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT.containing_type = ( - _CUSTOMINFOTYPE_DETECTIONRULE -) -_CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT.oneofs_by_name[ - "adjustment" -].fields.append( - _CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT.fields_by_name[ - "fixed_likelihood" - ] -) -_CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT.fields_by_name[ - "fixed_likelihood" -].containing_oneof = _CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT.oneofs_by_name[ - "adjustment" -] -_CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT.oneofs_by_name[ - "adjustment" -].fields.append( - _CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT.fields_by_name[ - "relative_likelihood" - ] -) -_CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT.fields_by_name[ - "relative_likelihood" -].containing_oneof = _CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT.oneofs_by_name[ - "adjustment" -] -_CUSTOMINFOTYPE_DETECTIONRULE_HOTWORDRULE.fields_by_name[ - "hotword_regex" -].message_type = _CUSTOMINFOTYPE_REGEX -_CUSTOMINFOTYPE_DETECTIONRULE_HOTWORDRULE.fields_by_name[ - "proximity" -].message_type = _CUSTOMINFOTYPE_DETECTIONRULE_PROXIMITY -_CUSTOMINFOTYPE_DETECTIONRULE_HOTWORDRULE.fields_by_name[ - "likelihood_adjustment" -].message_type = _CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT -_CUSTOMINFOTYPE_DETECTIONRULE_HOTWORDRULE.containing_type = ( - _CUSTOMINFOTYPE_DETECTIONRULE -) -_CUSTOMINFOTYPE_DETECTIONRULE.fields_by_name[ - "hotword_rule" -].message_type = _CUSTOMINFOTYPE_DETECTIONRULE_HOTWORDRULE -_CUSTOMINFOTYPE_DETECTIONRULE.containing_type = _CUSTOMINFOTYPE -_CUSTOMINFOTYPE_DETECTIONRULE.oneofs_by_name["type"].fields.append( - _CUSTOMINFOTYPE_DETECTIONRULE.fields_by_name["hotword_rule"] -) -_CUSTOMINFOTYPE_DETECTIONRULE.fields_by_name[ - "hotword_rule" -].containing_oneof = _CUSTOMINFOTYPE_DETECTIONRULE.oneofs_by_name["type"] -_CUSTOMINFOTYPE.fields_by_name["info_type"].message_type = _INFOTYPE -_CUSTOMINFOTYPE.fields_by_name["likelihood"].enum_type = 
_LIKELIHOOD -_CUSTOMINFOTYPE.fields_by_name["dictionary"].message_type = _CUSTOMINFOTYPE_DICTIONARY -_CUSTOMINFOTYPE.fields_by_name["regex"].message_type = _CUSTOMINFOTYPE_REGEX -_CUSTOMINFOTYPE.fields_by_name[ - "surrogate_type" -].message_type = _CUSTOMINFOTYPE_SURROGATETYPE -_CUSTOMINFOTYPE.fields_by_name["stored_type"].message_type = _STOREDTYPE -_CUSTOMINFOTYPE.fields_by_name[ - "detection_rules" -].message_type = _CUSTOMINFOTYPE_DETECTIONRULE -_CUSTOMINFOTYPE.fields_by_name[ - "exclusion_type" -].enum_type = _CUSTOMINFOTYPE_EXCLUSIONTYPE -_CUSTOMINFOTYPE_EXCLUSIONTYPE.containing_type = _CUSTOMINFOTYPE -_CUSTOMINFOTYPE.oneofs_by_name["type"].fields.append( - _CUSTOMINFOTYPE.fields_by_name["dictionary"] -) -_CUSTOMINFOTYPE.fields_by_name[ - "dictionary" -].containing_oneof = _CUSTOMINFOTYPE.oneofs_by_name["type"] -_CUSTOMINFOTYPE.oneofs_by_name["type"].fields.append( - _CUSTOMINFOTYPE.fields_by_name["regex"] -) -_CUSTOMINFOTYPE.fields_by_name[ - "regex" -].containing_oneof = _CUSTOMINFOTYPE.oneofs_by_name["type"] -_CUSTOMINFOTYPE.oneofs_by_name["type"].fields.append( - _CUSTOMINFOTYPE.fields_by_name["surrogate_type"] -) -_CUSTOMINFOTYPE.fields_by_name[ - "surrogate_type" -].containing_oneof = _CUSTOMINFOTYPE.oneofs_by_name["type"] -_CUSTOMINFOTYPE.oneofs_by_name["type"].fields.append( - _CUSTOMINFOTYPE.fields_by_name["stored_type"] -) -_CUSTOMINFOTYPE.fields_by_name[ - "stored_type" -].containing_oneof = _CUSTOMINFOTYPE.oneofs_by_name["type"] -_DATASTOREOPTIONS.fields_by_name["partition_id"].message_type = _PARTITIONID -_DATASTOREOPTIONS.fields_by_name["kind"].message_type = _KINDEXPRESSION -_CLOUDSTORAGEOPTIONS_FILESET.fields_by_name[ - "regex_file_set" -].message_type = _CLOUDSTORAGEREGEXFILESET -_CLOUDSTORAGEOPTIONS_FILESET.containing_type = _CLOUDSTORAGEOPTIONS -_CLOUDSTORAGEOPTIONS.fields_by_name[ - "file_set" -].message_type = _CLOUDSTORAGEOPTIONS_FILESET -_CLOUDSTORAGEOPTIONS.fields_by_name["file_types"].enum_type = _FILETYPE 
-_CLOUDSTORAGEOPTIONS.fields_by_name[ - "sample_method" -].enum_type = _CLOUDSTORAGEOPTIONS_SAMPLEMETHOD -_CLOUDSTORAGEOPTIONS_SAMPLEMETHOD.containing_type = _CLOUDSTORAGEOPTIONS -_BIGQUERYOPTIONS.fields_by_name["table_reference"].message_type = _BIGQUERYTABLE -_BIGQUERYOPTIONS.fields_by_name["identifying_fields"].message_type = _FIELDID -_BIGQUERYOPTIONS.fields_by_name[ - "sample_method" -].enum_type = _BIGQUERYOPTIONS_SAMPLEMETHOD -_BIGQUERYOPTIONS.fields_by_name["excluded_fields"].message_type = _FIELDID -_BIGQUERYOPTIONS_SAMPLEMETHOD.containing_type = _BIGQUERYOPTIONS -_STORAGECONFIG_TIMESPANCONFIG.fields_by_name[ - "start_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_STORAGECONFIG_TIMESPANCONFIG.fields_by_name[ - "end_time" -].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP -_STORAGECONFIG_TIMESPANCONFIG.fields_by_name["timestamp_field"].message_type = _FIELDID -_STORAGECONFIG_TIMESPANCONFIG.containing_type = _STORAGECONFIG -_STORAGECONFIG.fields_by_name["datastore_options"].message_type = _DATASTOREOPTIONS -_STORAGECONFIG.fields_by_name[ - "cloud_storage_options" -].message_type = _CLOUDSTORAGEOPTIONS -_STORAGECONFIG.fields_by_name["big_query_options"].message_type = _BIGQUERYOPTIONS -_STORAGECONFIG.fields_by_name["hybrid_options"].message_type = _HYBRIDOPTIONS -_STORAGECONFIG.fields_by_name[ - "timespan_config" -].message_type = _STORAGECONFIG_TIMESPANCONFIG -_STORAGECONFIG.oneofs_by_name["type"].fields.append( - _STORAGECONFIG.fields_by_name["datastore_options"] -) -_STORAGECONFIG.fields_by_name[ - "datastore_options" -].containing_oneof = _STORAGECONFIG.oneofs_by_name["type"] -_STORAGECONFIG.oneofs_by_name["type"].fields.append( - _STORAGECONFIG.fields_by_name["cloud_storage_options"] -) -_STORAGECONFIG.fields_by_name[ - "cloud_storage_options" -].containing_oneof = _STORAGECONFIG.oneofs_by_name["type"] -_STORAGECONFIG.oneofs_by_name["type"].fields.append( - _STORAGECONFIG.fields_by_name["big_query_options"] 
-) -_STORAGECONFIG.fields_by_name[ - "big_query_options" -].containing_oneof = _STORAGECONFIG.oneofs_by_name["type"] -_STORAGECONFIG.oneofs_by_name["type"].fields.append( - _STORAGECONFIG.fields_by_name["hybrid_options"] -) -_STORAGECONFIG.fields_by_name[ - "hybrid_options" -].containing_oneof = _STORAGECONFIG.oneofs_by_name["type"] -_HYBRIDOPTIONS_LABELSENTRY.containing_type = _HYBRIDOPTIONS -_HYBRIDOPTIONS.fields_by_name["labels"].message_type = _HYBRIDOPTIONS_LABELSENTRY -_HYBRIDOPTIONS.fields_by_name["table_options"].message_type = _TABLEOPTIONS -_BIGQUERYKEY.fields_by_name["table_reference"].message_type = _BIGQUERYTABLE -_DATASTOREKEY.fields_by_name["entity_key"].message_type = _KEY -_KEY_PATHELEMENT.containing_type = _KEY -_KEY_PATHELEMENT.oneofs_by_name["id_type"].fields.append( - _KEY_PATHELEMENT.fields_by_name["id"] -) -_KEY_PATHELEMENT.fields_by_name[ - "id" -].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name["id_type"] -_KEY_PATHELEMENT.oneofs_by_name["id_type"].fields.append( - _KEY_PATHELEMENT.fields_by_name["name"] -) -_KEY_PATHELEMENT.fields_by_name[ - "name" -].containing_oneof = _KEY_PATHELEMENT.oneofs_by_name["id_type"] -_KEY.fields_by_name["partition_id"].message_type = _PARTITIONID -_KEY.fields_by_name["path"].message_type = _KEY_PATHELEMENT -_RECORDKEY.fields_by_name["datastore_key"].message_type = _DATASTOREKEY -_RECORDKEY.fields_by_name["big_query_key"].message_type = _BIGQUERYKEY -_RECORDKEY.oneofs_by_name["type"].fields.append( - _RECORDKEY.fields_by_name["datastore_key"] -) -_RECORDKEY.fields_by_name["datastore_key"].containing_oneof = _RECORDKEY.oneofs_by_name[ - "type" -] -_RECORDKEY.oneofs_by_name["type"].fields.append( - _RECORDKEY.fields_by_name["big_query_key"] -) -_RECORDKEY.fields_by_name["big_query_key"].containing_oneof = _RECORDKEY.oneofs_by_name[ - "type" -] -_BIGQUERYFIELD.fields_by_name["table"].message_type = _BIGQUERYTABLE -_BIGQUERYFIELD.fields_by_name["field"].message_type = _FIELDID 
-_ENTITYID.fields_by_name["field"].message_type = _FIELDID -_TABLEOPTIONS.fields_by_name["identifying_fields"].message_type = _FIELDID -DESCRIPTOR.message_types_by_name["InfoType"] = _INFOTYPE -DESCRIPTOR.message_types_by_name["StoredType"] = _STOREDTYPE -DESCRIPTOR.message_types_by_name["CustomInfoType"] = _CUSTOMINFOTYPE -DESCRIPTOR.message_types_by_name["FieldId"] = _FIELDID -DESCRIPTOR.message_types_by_name["PartitionId"] = _PARTITIONID -DESCRIPTOR.message_types_by_name["KindExpression"] = _KINDEXPRESSION -DESCRIPTOR.message_types_by_name["DatastoreOptions"] = _DATASTOREOPTIONS -DESCRIPTOR.message_types_by_name["CloudStorageRegexFileSet"] = _CLOUDSTORAGEREGEXFILESET -DESCRIPTOR.message_types_by_name["CloudStorageOptions"] = _CLOUDSTORAGEOPTIONS -DESCRIPTOR.message_types_by_name["CloudStorageFileSet"] = _CLOUDSTORAGEFILESET -DESCRIPTOR.message_types_by_name["CloudStoragePath"] = _CLOUDSTORAGEPATH -DESCRIPTOR.message_types_by_name["BigQueryOptions"] = _BIGQUERYOPTIONS -DESCRIPTOR.message_types_by_name["StorageConfig"] = _STORAGECONFIG -DESCRIPTOR.message_types_by_name["HybridOptions"] = _HYBRIDOPTIONS -DESCRIPTOR.message_types_by_name["BigQueryKey"] = _BIGQUERYKEY -DESCRIPTOR.message_types_by_name["DatastoreKey"] = _DATASTOREKEY -DESCRIPTOR.message_types_by_name["Key"] = _KEY -DESCRIPTOR.message_types_by_name["RecordKey"] = _RECORDKEY -DESCRIPTOR.message_types_by_name["BigQueryTable"] = _BIGQUERYTABLE -DESCRIPTOR.message_types_by_name["BigQueryField"] = _BIGQUERYFIELD -DESCRIPTOR.message_types_by_name["EntityId"] = _ENTITYID -DESCRIPTOR.message_types_by_name["TableOptions"] = _TABLEOPTIONS -DESCRIPTOR.enum_types_by_name["Likelihood"] = _LIKELIHOOD -DESCRIPTOR.enum_types_by_name["FileType"] = _FILETYPE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -InfoType = _reflection.GeneratedProtocolMessageType( - "InfoType", - (_message.Message,), - { - "DESCRIPTOR": _INFOTYPE, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Type of information 
detected by the API. - Attributes: - name: - Name of the information type. Either a name of your choosing - when creating a CustomInfoType, or one of the names listed at - https://cloud.google.com/dlp/docs/infotypes-reference when - specifying a built-in type. InfoType names should conform to - the pattern ``[a-zA-Z0-9_]{1,64}``. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.InfoType) - }, -) -_sym_db.RegisterMessage(InfoType) - -StoredType = _reflection.GeneratedProtocolMessageType( - "StoredType", - (_message.Message,), - { - "DESCRIPTOR": _STOREDTYPE, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """A reference to a StoredInfoType to use with scanning. - Attributes: - name: - Resource name of the requested ``StoredInfoType``, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - ``projects/project-id/storedInfoTypes/432452342``. - create_time: - Timestamp indicating when the version of the - ``StoredInfoType`` used for inspection was created. Output- - only field, populated by the system. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.StoredType) - }, -) -_sym_db.RegisterMessage(StoredType) - -CustomInfoType = _reflection.GeneratedProtocolMessageType( - "CustomInfoType", - (_message.Message,), - { - "Dictionary": _reflection.GeneratedProtocolMessageType( - "Dictionary", - (_message.Message,), - { - "WordList": _reflection.GeneratedProtocolMessageType( - "WordList", - (_message.Message,), - { - "DESCRIPTOR": _CUSTOMINFOTYPE_DICTIONARY_WORDLIST, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Message defining a list of words or phrases to search for in the data. - Attributes: - words: - Words or phrases defining the dictionary. The dictionary must - contain at least one phrase and every phrase must contain at - least 2 characters that are letters or digits. 
[required] - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CustomInfoType.Dictionary.WordList) - }, - ), - "DESCRIPTOR": _CUSTOMINFOTYPE_DICTIONARY, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Custom information type based on a dictionary of words or phrases. - This can be used to match sensitive information specific to the data, - such as a list of employee IDs or job titles. Dictionary words are - case-insensitive and all characters other than letters and digits in - the unicode `Basic Multilingual Plane `__ will be replaced with - whitespace when scanning for matches, so the dictionary phrase “Sam - Johnson” will match all three phrases “sam johnson”, “Sam, Johnson”, - and “Sam (Johnson)”. Additionally, the characters surrounding any - match must be of a different type than the adjacent characters within - the word, so letters must be next to non-letters and digits next to - non-digits. For example, the dictionary word “jen” will match the - first three letters of the text “jen123” but will return no matches - for “jennifer”. Dictionary words containing a large number of - characters that are not letters or digits may result in unexpected - findings because such characters are treated as whitespace. The - `limits `__ page contains details - about the size limits of dictionaries. For dictionaries that do not - fit within these constraints, consider using - ``LargeCustomDictionaryConfig`` in the ``StoredInfoType`` API. - Attributes: - word_list: - List of words or phrases to search for. - cloud_storage_path: - Newline-delimited file of words in Cloud Storage. Only a - single file is accepted. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CustomInfoType.Dictionary) - }, - ), - "Regex": _reflection.GeneratedProtocolMessageType( - "Regex", - (_message.Message,), - { - "DESCRIPTOR": _CUSTOMINFOTYPE_REGEX, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Message defining a custom regular expression. - Attributes: - pattern: - Pattern defining the regular expression. Its syntax - (https://github.com/google/re2/wiki/Syntax) can be found under - the google/re2 repository on GitHub. - group_indexes: - The index of the submatch to extract as findings. When not - specified, the entire match is returned. No more than 3 may be - included. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CustomInfoType.Regex) - }, - ), - "SurrogateType": _reflection.GeneratedProtocolMessageType( - "SurrogateType", - (_message.Message,), - { - "DESCRIPTOR": _CUSTOMINFOTYPE_SURROGATETYPE, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Message for detecting output from deidentification transformations - such as ```CryptoReplaceFfxFpeConfig`` `__. These - types of transformations are those that perform pseudonymization, - thereby producing a “surrogate” as output. This should be used in - conjunction with a field on the transformation such as - ``surrogate_info_type``. This CustomInfoType does not support the use - of ``detection_rules``.""", - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CustomInfoType.SurrogateType) - }, - ), - "DetectionRule": _reflection.GeneratedProtocolMessageType( - "DetectionRule", - (_message.Message,), - { - "Proximity": _reflection.GeneratedProtocolMessageType( - "Proximity", - (_message.Message,), - { - "DESCRIPTOR": _CUSTOMINFOTYPE_DETECTIONRULE_PROXIMITY, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Message for specifying a window around a finding to apply a detection - rule. 
- Attributes: - window_before: - Number of characters before the finding to consider. - window_after: - Number of characters after the finding to consider. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity) - }, - ), - "LikelihoodAdjustment": _reflection.GeneratedProtocolMessageType( - "LikelihoodAdjustment", - (_message.Message,), - { - "DESCRIPTOR": _CUSTOMINFOTYPE_DETECTIONRULE_LIKELIHOODADJUSTMENT, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Message for specifying an adjustment to the likelihood of a finding as - part of a detection rule. - Attributes: - fixed_likelihood: - Set the likelihood of a finding to a fixed value. - relative_likelihood: - Increase or decrease the likelihood by the specified number of - levels. For example, if a finding would be ``POSSIBLE`` - without the detection rule and ``relative_likelihood`` is 1, - then it is upgraded to ``LIKELY``, while a value of -1 would - downgrade it to ``UNLIKELY``. Likelihood may never drop below - ``VERY_UNLIKELY`` or exceed ``VERY_LIKELY``, so applying an - adjustment of 1 followed by an adjustment of -1 when base - likelihood is ``VERY_LIKELY`` will result in a final - likelihood of ``LIKELY``. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CustomInfoType.DetectionRule.LikelihoodAdjustment) - }, - ), - "HotwordRule": _reflection.GeneratedProtocolMessageType( - "HotwordRule", - (_message.Message,), - { - "DESCRIPTOR": _CUSTOMINFOTYPE_DETECTIONRULE_HOTWORDRULE, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """The rule that adjusts the likelihood of findings within a certain - proximity of hotwords. - - Attributes: - hotword_regex: \ - Regular expression pattern defining what qualifies as a \ - hotword. - proximity: - Proximity of the finding within which the entire hotword must - reside. The total length of the window cannot exceed 1000 - characters. 
Note that the finding itself will be included in - the window, so that hotwords may be used to match substrings - of the finding itself. For example, the certainty of a phone - number regex "(\d\{3\}) \d\{3\}-\d\{4\} "\ - could be adjusted upwards if the area code is \ - known to be the local area code of a company office using the - hotword regex “(xxx)”, where “xxx” is the area code in - question. - likelihood_adjustment: \ - Likelihood adjustment to apply to all matching findings. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CustomInfoType.DetectionRule.HotwordRule) - }, - ), - "DESCRIPTOR": _CUSTOMINFOTYPE_DETECTIONRULE, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a - ``CustomInfoType`` to alter behavior under certain circumstances, - depending on the specific details of the rule. Not supported for the - ``surrogate_type`` custom infoType. - Attributes: - hotword_rule: - Hotword-based detection rule. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CustomInfoType.DetectionRule) - }, - ), - "DESCRIPTOR": _CUSTOMINFOTYPE, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Custom information type provided by the user. Used to find domain- - specific sensitive information configurable to the data in question. - Attributes: - info_type: - CustomInfoType can either be a new infoType, or an extension - of built-in infoType, when the name matches one of existing - infoTypes and that infoType is specified in - ``InspectContent.info_types`` field. Specifying the latter - adds findings to the one detected by the system. If built-in - info type is not specified in ``InspectContent.info_types`` - list then the name is treated as a custom info type. - likelihood: - Likelihood to return for this CustomInfoType. 
This base value - can be altered by a detection rule if the finding meets the - criteria specified by the rule. Defaults to ``VERY_LIKELY`` if - not specified. - dictionary: - A list of phrases to detect as a CustomInfoType. - regex: - Regular expression based CustomInfoType. - surrogate_type: - Message for detecting output from deidentification - transformations that support reversing. - stored_type: - Load an existing ``StoredInfoType`` resource for use in - ``InspectDataSource``. Not currently supported in - ``InspectContent``. - detection_rules: - Set of detection rules to apply to all findings of this - CustomInfoType. Rules are applied in order that they are - specified. Not supported for the ``surrogate_type`` - CustomInfoType. - exclusion_type: - If set to EXCLUSION_TYPE_EXCLUDE this infoType will not cause - a finding to be returned. It still can be used for rules - matching. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CustomInfoType) - }, -) -_sym_db.RegisterMessage(CustomInfoType) -_sym_db.RegisterMessage(CustomInfoType.Dictionary) -_sym_db.RegisterMessage(CustomInfoType.Dictionary.WordList) -_sym_db.RegisterMessage(CustomInfoType.Regex) -_sym_db.RegisterMessage(CustomInfoType.SurrogateType) -_sym_db.RegisterMessage(CustomInfoType.DetectionRule) -_sym_db.RegisterMessage(CustomInfoType.DetectionRule.Proximity) -_sym_db.RegisterMessage(CustomInfoType.DetectionRule.LikelihoodAdjustment) -_sym_db.RegisterMessage(CustomInfoType.DetectionRule.HotwordRule) - -FieldId = _reflection.GeneratedProtocolMessageType( - "FieldId", - (_message.Message,), - { - "DESCRIPTOR": _FIELDID, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """General identifier of a data field in a storage service. - Attributes: - name: - Name describing the field. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.FieldId) - }, -) -_sym_db.RegisterMessage(FieldId) - -PartitionId = _reflection.GeneratedProtocolMessageType( - "PartitionId", - (_message.Message,), - { - "DESCRIPTOR": _PARTITIONID, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Datastore partition ID. A partition ID identifies a grouping of - entities. The grouping is always by project and namespace, however the - namespace ID may be empty. A partition ID contains several - dimensions: project ID and namespace ID. - Attributes: - project_id: - The ID of the project to which the entities belong. - namespace_id: - If not empty, the ID of the namespace to which the entities - belong. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.PartitionId) - }, -) -_sym_db.RegisterMessage(PartitionId) - -KindExpression = _reflection.GeneratedProtocolMessageType( - "KindExpression", - (_message.Message,), - { - "DESCRIPTOR": _KINDEXPRESSION, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """A representation of a Datastore kind. - Attributes: - name: - The name of the kind. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.KindExpression) - }, -) -_sym_db.RegisterMessage(KindExpression) - -DatastoreOptions = _reflection.GeneratedProtocolMessageType( - "DatastoreOptions", - (_message.Message,), - { - "DESCRIPTOR": _DATASTOREOPTIONS, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Options defining a data set within Google Cloud Datastore. - Attributes: - partition_id: - A partition ID identifies a grouping of entities. The grouping - is always by project and namespace, however the namespace ID - may be empty. - kind: - The kind to process. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DatastoreOptions) - }, -) -_sym_db.RegisterMessage(DatastoreOptions) - -CloudStorageRegexFileSet = _reflection.GeneratedProtocolMessageType( - "CloudStorageRegexFileSet", - (_message.Message,), - { - "DESCRIPTOR": _CLOUDSTORAGEREGEXFILESET, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Message representing a set of files in a Cloud Storage bucket. Regular - expressions are used to allow fine-grained control over which files in - the bucket to include. Included files are those that match at least - one item in ``include_regex`` and do not match any items in - ``exclude_regex``. Note that a file that matches items from both lists - will *not* be included. For a match to occur, the entire file path - (i.e., everything in the url after the bucket name) must match the - regular expression. For example, given the input ``{bucket_name: - "mybucket", include_regex: ["directory1/.*"], exclude_regex: - ["directory1/excluded.*"]}``: - ``gs://mybucket/directory1/myfile`` - will be included - ``gs://mybucket/directory1/directory2/myfile`` - will be included (``.*`` matches across ``/``) - - ``gs://mybucket/directory0/directory1/myfile`` will *not* be included - (the full path doesn’t match any items in ``include_regex``) - - ``gs://mybucket/directory1/excludedfile`` will *not* be included (the - path matches an item in ``exclude_regex``) If ``include_regex`` is - left empty, it will match all files by default (this is equivalent to - setting ``include_regex: [".*"]``). Some other common use cases: - - ``{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}`` will - include all files in ``mybucket`` except for .pdf files - - ``{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}`` - will include all files directly under ``gs://mybucket/directory/``, - without matching across ``/`` - Attributes: - bucket_name: - The name of a Cloud Storage bucket. Required. 
- include_regex: - A list of regular expressions matching file paths to include. - All files in the bucket that match at least one of these - regular expressions will be included in the set of files, - except for those that also match an item in ``exclude_regex``. - Leaving this field empty will match all files by default (this - is equivalent to including ``.*`` in the list). Regular - expressions use RE2 `syntax - `__; a guide can be - found under the google/re2 repository on GitHub. - exclude_regex: - A list of regular expressions matching file paths to exclude. - All files in the bucket that match at least one of these - regular expressions will be excluded from the scan. Regular - expressions use RE2 `syntax - `__; a guide can be - found under the google/re2 repository on GitHub. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CloudStorageRegexFileSet) - }, -) -_sym_db.RegisterMessage(CloudStorageRegexFileSet) - -CloudStorageOptions = _reflection.GeneratedProtocolMessageType( - "CloudStorageOptions", - (_message.Message,), - { - "FileSet": _reflection.GeneratedProtocolMessageType( - "FileSet", - (_message.Message,), - { - "DESCRIPTOR": _CLOUDSTORAGEOPTIONS_FILESET, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Set of files to scan. - Attributes: - url: - The Cloud Storage url of the file(s) to scan, in the format - ``gs:///``. Trailing wildcard in the path is - allowed. If the url ends in a trailing slash, the bucket or - directory represented by the url will be scanned non- - recursively (content in sub-directories will not be scanned). - This means that ``gs://mybucket/`` is equivalent to - ``gs://mybucket/*``, and ``gs://mybucket/directory/`` is - equivalent to ``gs://mybucket/directory/*``. Exactly one of - ``url`` or ``regex_file_set`` must be set. - regex_file_set: - The regex-filtered set of files to scan. Exactly one of - ``url`` or ``regex_file_set`` must be set. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CloudStorageOptions.FileSet) - }, - ), - "DESCRIPTOR": _CLOUDSTORAGEOPTIONS, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Options defining a file or a set of files within a Google Cloud - Storage bucket. - Attributes: - file_set: - The set of one or more files to scan. - bytes_limit_per_file: - Max number of bytes to scan from a file. If a scanned file’s - size is bigger than this value then the rest of the bytes are - omitted. Only one of bytes_limit_per_file and - bytes_limit_per_file_percent can be specified. - bytes_limit_per_file_percent: - Max percentage of bytes to scan from a file. The rest are - omitted. The number of bytes scanned is rounded down. Must be - between 0 and 100, inclusively. Both 0 and 100 means no limit. - Defaults to 0. Only one of bytes_limit_per_file and - bytes_limit_per_file_percent can be specified. - file_types: - List of file type groups to include in the scan. If empty, all - files are scanned and available data format processors are - applied. In addition, the binary content of the selected files - is always scanned as well. Images are scanned only as binary - if the specified region does not support image inspection and - no file_types were specified. Image inspection is restricted - to ‘global’, ‘us’, ‘asia’, and ‘europe’. - files_limit_percent: - Limits the number of files to scan to this percentage of the - input FileSet. Number of files scanned is rounded down. Must - be between 0 and 100, inclusively. Both 0 and 100 means no - limit. Defaults to 0. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CloudStorageOptions) - }, -) -_sym_db.RegisterMessage(CloudStorageOptions) -_sym_db.RegisterMessage(CloudStorageOptions.FileSet) - -CloudStorageFileSet = _reflection.GeneratedProtocolMessageType( - "CloudStorageFileSet", - (_message.Message,), - { - "DESCRIPTOR": _CLOUDSTORAGEFILESET, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Message representing a set of files in Cloud Storage. - Attributes: - url: - The url, in the format ``gs:///``. Trailing - wildcard in the path is allowed. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CloudStorageFileSet) - }, -) -_sym_db.RegisterMessage(CloudStorageFileSet) - -CloudStoragePath = _reflection.GeneratedProtocolMessageType( - "CloudStoragePath", - (_message.Message,), - { - "DESCRIPTOR": _CLOUDSTORAGEPATH, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Message representing a single file or path in Cloud Storage. - Attributes: - path: - A url representing a file or path (no wildcards) in Cloud - Storage. Example: gs://[BUCKET_NAME]/dictionary.txt - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.CloudStoragePath) - }, -) -_sym_db.RegisterMessage(CloudStoragePath) - -BigQueryOptions = _reflection.GeneratedProtocolMessageType( - "BigQueryOptions", - (_message.Message,), - { - "DESCRIPTOR": _BIGQUERYOPTIONS, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Options defining BigQuery table and row identifiers. - Attributes: - table_reference: - Complete BigQuery table reference. - identifying_fields: - Table fields that may uniquely identify a row within the - table. When ``actions.saveFindings.outputConfig.table`` is - specified, the values of columns specified here are available - in the output table under ``location.content_locations.record_ - location.record_key.id_values``. Nested fields such as - ``person.birthdate.year`` are allowed. 
- rows_limit: - Max number of rows to scan. If the table has more rows than - this value, the rest of the rows are omitted. If not set, or - if set to 0, all rows will be scanned. Only one of rows_limit - and rows_limit_percent can be specified. Cannot be used in - conjunction with TimespanConfig. - rows_limit_percent: - Max percentage of rows to scan. The rest are omitted. The - number of rows scanned is rounded down. Must be between 0 and - 100, inclusively. Both 0 and 100 means no limit. Defaults to - 0. Only one of rows_limit and rows_limit_percent can be - specified. Cannot be used in conjunction with TimespanConfig. - excluded_fields: - References to fields excluded from scanning. This allows you - to skip inspection of entire columns which you know have no - findings. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.BigQueryOptions) - }, -) -_sym_db.RegisterMessage(BigQueryOptions) - -StorageConfig = _reflection.GeneratedProtocolMessageType( - "StorageConfig", - (_message.Message,), - { - "TimespanConfig": _reflection.GeneratedProtocolMessageType( - "TimespanConfig", - (_message.Message,), - { - "DESCRIPTOR": _STORAGECONFIG_TIMESPANCONFIG, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Configuration of the timespan of the items to include in scanning. - Currently only supported when inspecting Google Cloud Storage and - BigQuery. - Attributes: - start_time: - Exclude files or rows older than this value. - end_time: - Exclude files or rows newer than this value. If set to zero, - no upper time limit is applied. - timestamp_field: - Specification of the field containing the timestamp of scanned - items. Used for data sources like Datastore and BigQuery. For - BigQuery: Required to filter out rows based on the given start - and end times. If not specified and the table was modified - between the given start and end times, the entire table will - be scanned. 
The valid data types of the timestamp field are: - ``INTEGER``, ``DATE``, ``TIMESTAMP``, or ``DATETIME`` BigQuery - column. For Datastore. Valid data types of the timestamp - field are: ``TIMESTAMP``. Datastore entity will be scanned if - the timestamp property does not exist or its value is empty or - invalid. - enable_auto_population_of_timespan_config: - When the job is started by a JobTrigger we will automatically - figure out a valid start_time to avoid scanning files that - have not been modified since the last time the JobTrigger - executed. This will be based on the time of the execution of - the last run of the JobTrigger. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.StorageConfig.TimespanConfig) - }, - ), - "DESCRIPTOR": _STORAGECONFIG, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Shared message indicating Cloud storage type. - Attributes: - datastore_options: - Google Cloud Datastore options. - cloud_storage_options: - Google Cloud Storage options. - big_query_options: - BigQuery options. - hybrid_options: - Hybrid inspection options. Early access feature is in a pre- - release state and might change or have limited support. For - more information, see - https://cloud.google.com/products#product-launch-stages. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.StorageConfig) - }, -) -_sym_db.RegisterMessage(StorageConfig) -_sym_db.RegisterMessage(StorageConfig.TimespanConfig) - -HybridOptions = _reflection.GeneratedProtocolMessageType( - "HybridOptions", - (_message.Message,), - { - "LabelsEntry": _reflection.GeneratedProtocolMessageType( - "LabelsEntry", - (_message.Message,), - { - "DESCRIPTOR": _HYBRIDOPTIONS_LABELSENTRY, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2" - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.HybridOptions.LabelsEntry) - }, - ), - "DESCRIPTOR": _HYBRIDOPTIONS, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Configuration to control jobs where the content being inspected is - outside of Google Cloud Platform. - Attributes: - description: - A short description of where the data is coming from. Will be - stored once in the job. 256 max length. - required_finding_label_keys: - These are labels that each inspection request must include - within their ‘finding_labels’ map. Request may contain others, - but any missing one of these will be rejected. Label keys - must be between 1 and 63 characters long and must conform to - the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. No more than 10 keys can be - required. - labels: - To organize findings, these labels will be added to each - finding. Label keys must be between 1 and 63 characters long - and must conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. Label values must be between - 0 and 63 characters long and must conform to the regular - expression ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. No more than 10 - labels can be associated with a given finding. Examples: \* - ``"environment" : "production"`` \* ``"pipeline" : "etl"`` - table_options: - If the container is a table, additional information to make - findings meaningful such as the columns that are primary keys. 
- """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.HybridOptions) - }, -) -_sym_db.RegisterMessage(HybridOptions) -_sym_db.RegisterMessage(HybridOptions.LabelsEntry) - -BigQueryKey = _reflection.GeneratedProtocolMessageType( - "BigQueryKey", - (_message.Message,), - { - "DESCRIPTOR": _BIGQUERYKEY, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Row key for identifying a record in BigQuery table. - Attributes: - table_reference: - Complete BigQuery table reference. - row_number: - Row number inferred at the time the table was scanned. This - value is nondeterministic, cannot be queried, and may be null - for inspection jobs. To locate findings within a table, - specify ``inspect_job.storage_config.big_query_options.identif - ying_fields`` in ``CreateDlpJobRequest``. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.BigQueryKey) - }, -) -_sym_db.RegisterMessage(BigQueryKey) - -DatastoreKey = _reflection.GeneratedProtocolMessageType( - "DatastoreKey", - (_message.Message,), - { - "DESCRIPTOR": _DATASTOREKEY, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Record key for a finding in Cloud Datastore. - Attributes: - entity_key: - Datastore entity key. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DatastoreKey) - }, -) -_sym_db.RegisterMessage(DatastoreKey) - -Key = _reflection.GeneratedProtocolMessageType( - "Key", - (_message.Message,), - { - "PathElement": _reflection.GeneratedProtocolMessageType( - "PathElement", - (_message.Message,), - { - "DESCRIPTOR": _KEY_PATHELEMENT, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """A (kind, ID/name) pair used to construct a key path. If either name - or ID is set, the element is complete. If neither is set, the element - is incomplete. - Attributes: - kind: - The kind of the entity. A kind matching regex ``__.*__`` is - reserved/read-only. 
A kind must not contain more than 1500 - bytes when UTF-8 encoded. Cannot be ``""``. - id_type: - The type of ID. - id: - The auto-allocated ID of the entity. Never equal to zero. - Values less than zero are discouraged and may not be supported - in the future. - name: - The name of the entity. A name matching regex ``__.*__`` is - reserved/read-only. A name must not be more than 1500 bytes - when UTF-8 encoded. Cannot be ``""``. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.Key.PathElement) - }, - ), - "DESCRIPTOR": _KEY, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """A unique identifier for a Datastore entity. If a key’s partition ID or - any of its path kinds or names are reserved/read-only, the key is - reserved/read-only. A reserved/read-only key is forbidden in certain - documented contexts. - Attributes: - partition_id: - Entities are partitioned into subsets, currently identified by - a project ID and namespace ID. Queries are scoped to a single - partition. - path: - The entity path. An entity path consists of one or more - elements composed of a kind and a string or numerical - identifier, which identify entities. The first element - identifies a *root entity*, the second element identifies a - *child* of the root entity, the third element identifies a - child of the second entity, and so forth. The entities - identified by all prefixes of the path are called the - element’s *ancestors*. A path can never be empty, and a path - can have at most 100 elements. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.Key) - }, -) -_sym_db.RegisterMessage(Key) -_sym_db.RegisterMessage(Key.PathElement) - -RecordKey = _reflection.GeneratedProtocolMessageType( - "RecordKey", - (_message.Message,), - { - "DESCRIPTOR": _RECORDKEY, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Message for a unique key indicating a record that contains a finding. 
- Attributes: - id_values: - Values of identifying columns in the given row. Order of - values matches the order of ``identifying_fields`` specified - in the scanning request. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.RecordKey) - }, -) -_sym_db.RegisterMessage(RecordKey) - -BigQueryTable = _reflection.GeneratedProtocolMessageType( - "BigQueryTable", - (_message.Message,), - { - "DESCRIPTOR": _BIGQUERYTABLE, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Message defining the location of a BigQuery table. A table is uniquely - identified by its project_id, dataset_id, and table_name. Within a - query a table is often referenced with a string in the format of: - ``:.`` or - ``..``. - Attributes: - project_id: - The Google Cloud Platform project ID of the project containing - the table. If omitted, project ID is inferred from the API - call. - dataset_id: - Dataset ID of the table. - table_id: - Name of the table. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.BigQueryTable) - }, -) -_sym_db.RegisterMessage(BigQueryTable) - -BigQueryField = _reflection.GeneratedProtocolMessageType( - "BigQueryField", - (_message.Message,), - { - "DESCRIPTOR": _BIGQUERYFIELD, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Message defining a field of a BigQuery table. - Attributes: - table: - Source table of the field. - field: - Designated field in the BigQuery table. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.BigQueryField) - }, -) -_sym_db.RegisterMessage(BigQueryField) - -EntityId = _reflection.GeneratedProtocolMessageType( - "EntityId", - (_message.Message,), - { - "DESCRIPTOR": _ENTITYID, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """An entity in a dataset is a field or set of fields that correspond to - a single person. 
For example, in medical records the ``EntityId`` - might be a patient identifier, or for financial records it might be an - account identifier. This message is used when generalizations or - analysis must take into account that multiple rows correspond to the - same entity. - Attributes: - field: - Composite key indicating which field contains the entity - identifier. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.EntityId) - }, -) -_sym_db.RegisterMessage(EntityId) - -TableOptions = _reflection.GeneratedProtocolMessageType( - "TableOptions", - (_message.Message,), - { - "DESCRIPTOR": _TABLEOPTIONS, - "__module__": "google.cloud.dlp_v2.proto.storage_pb2", - "__doc__": """Instructions regarding the table content being inspected. - Attributes: - identifying_fields: - The columns that are the primary keys for table objects - included in ContentItem. A copy of this cell’s value will - stored alongside alongside each finding so that the finding - can be traced to the specific row it came from. No more than 3 - may be provided. - """, - # @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.TableOptions) - }, -) -_sym_db.RegisterMessage(TableOptions) - - -DESCRIPTOR._options = None -_HYBRIDOPTIONS_LABELSENTRY._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/dlp_v2/proto/storage_pb2_grpc.py b/google/cloud/dlp_v2/proto/storage_pb2_grpc.py deleted file mode 100644 index 07cb78fe..00000000 --- a/google/cloud/dlp_v2/proto/storage_pb2_grpc.py +++ /dev/null @@ -1,2 +0,0 @@ -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc diff --git a/google/cloud/dlp_v2/py.typed b/google/cloud/dlp_v2/py.typed new file mode 100644 index 00000000..23d89ef3 --- /dev/null +++ b/google/cloud/dlp_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. 
diff --git a/google/cloud/__init__.py b/google/cloud/dlp_v2/services/__init__.py similarity index 71% rename from google/cloud/__init__.py rename to google/cloud/dlp_v2/services/__init__.py index 9a1b64a6..42ffdf2b 100644 --- a/google/cloud/__init__.py +++ b/google/cloud/dlp_v2/services/__init__.py @@ -1,24 +1,16 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) +# diff --git a/google/__init__.py b/google/cloud/dlp_v2/services/dlp_service/__init__.py similarity index 71% rename from google/__init__.py rename to google/cloud/dlp_v2/services/dlp_service/__init__.py index 9a1b64a6..c55616fa 100644 --- a/google/__init__.py +++ b/google/cloud/dlp_v2/services/dlp_service/__init__.py @@ -1,24 +1,24 @@ # -*- coding: utf-8 -*- -# + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # -# https://www.apache.org/licenses/LICENSE-2.0 +# http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil +from .client import DlpServiceClient +from .async_client import DlpServiceAsyncClient - __path__ = pkgutil.extend_path(__path__, __name__) +__all__ = ( + "DlpServiceClient", + "DlpServiceAsyncClient", +) diff --git a/google/cloud/dlp_v2/services/dlp_service/async_client.py b/google/cloud/dlp_v2/services/dlp_service/async_client.py new file mode 100644 index 00000000..3ffaf6b5 --- /dev/null +++ b/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -0,0 +1,2875 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.types import dlp +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import DlpServiceTransport +from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .client import DlpServiceClient + + +class DlpServiceAsyncClient: + """The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in user- + supplied, unstructured data streams, like text blocks or images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. 
+ """ + + _client: DlpServiceClient + + DEFAULT_ENDPOINT = DlpServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DlpServiceClient.DEFAULT_MTLS_ENDPOINT + + job_trigger_path = staticmethod(DlpServiceClient.job_trigger_path) + + inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) + + deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) + + from_service_account_file = DlpServiceClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(DlpServiceClient).get_transport_class, type(DlpServiceClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, DlpServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + ) -> None: + """Instantiate the dlp service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DlpServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. 
+                (2) The ``client_cert_source`` property is used to provide client
+                SSL credentials for mutual TLS transport. If not provided, the
+                default SSL credentials will be used if present.
+
+        Raises:
+            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+                creation failed for any reason.
+        """
+
+        self._client = DlpServiceClient(
+            credentials=credentials, transport=transport, client_options=client_options,
+        )
+
+    async def inspect_content(
+        self,
+        request: dlp.InspectContentRequest = None,
+        *,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> dlp.InspectContentResponse:
+        r"""Finds potentially sensitive info in content.
+        This method has limits on input size, processing time,
+        and output size.
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+        For how-to guides, see
+        https://cloud.google.com/dlp/docs/inspecting-images and
+        https://cloud.google.com/dlp/docs/inspecting-text.
+
+        Args:
+            request (:class:`~.dlp.InspectContentRequest`):
+                The request object. Request to search for potentially
+                sensitive info in a ContentItem.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.dlp.InspectContentResponse:
+                Results of inspecting an item.
+        """
+        # Create or coerce a protobuf request object.
+
+        request = dlp.InspectContentRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.inspect_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def redact_image( + self, + request: dlp.RedactImageRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.RedactImageResponse: + r"""Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive- + data-images to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Args: + request (:class:`~.dlp.RedactImageRequest`): + The request object. Request to search for potentially + sensitive info in an image and redact it by covering it + with a colored rectangle. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.RedactImageResponse: + Results of redacting an image. + """ + # Create or coerce a protobuf request object. 
+ + request = dlp.RedactImageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.redact_image, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def deidentify_content( + self, + request: dlp.DeidentifyContentRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive- + data to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Args: + request (:class:`~.dlp.DeidentifyContentRequest`): + The request object. Request to de-identify a list of + items. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. 
+
+        """
+        # Create or coerce a protobuf request object.
+
+        request = dlp.DeidentifyContentRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.deidentify_content,
+            default_retry=retries.Retry(
+                initial=0.1,
+                maximum=60.0,
+                multiplier=1.3,
+                predicate=retries.if_exception_type(
+                    exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
+                ),
+            ),
+            default_timeout=300.0,
+            client_info=_client_info,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+        )
+
+        # Send the request.
+        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+        # Done; return the response.
+        return response
+
+    async def reidentify_content(
+        self,
+        request: dlp.ReidentifyContentRequest = None,
+        *,
+        retry: retries.Retry = gapic_v1.method.DEFAULT,
+        timeout: float = None,
+        metadata: Sequence[Tuple[str, str]] = (),
+    ) -> dlp.ReidentifyContentResponse:
+        r"""Re-identifies content that has been de-identified. See
+        https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example
+        to learn more.
+
+        Args:
+            request (:class:`~.dlp.ReidentifyContentRequest`):
+                The request object. Request to re-identify an item.
+
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            ~.dlp.ReidentifyContentResponse:
+                Results of re-identifying an item.
+        """
+        # Create or coerce a protobuf request object.
+
+        request = dlp.ReidentifyContentRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reidentify_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_info_types( + self, + request: dlp.ListInfoTypesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that the DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Args: + request (:class:`~.dlp.ListInfoTypesRequest`): + The request object. Request for the list of infoTypes. + parent (:class:`str`): + The parent resource name. + + - Format:locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.ListInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_info_types, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def create_inspect_template( + self, + request: dlp.CreateInspectTemplateRequest = None, + *, + parent: str = None, + inspect_template: dlp.InspectTemplate = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Creates an InspectTemplate for re-using frequently + used configuration for inspecting content, images, and + storage. See https://cloud.google.com/dlp/docs/creating- + templates to learn more. + + Args: + request (:class:`~.dlp.CreateInspectTemplateRequest`): + The request object. Request message for + CreateInspectTemplate. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ inspect_template (:class:`~.dlp.InspectTemplate`): + Required. The InspectTemplate to + create. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, inspect_template]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.CreateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if inspect_template is not None: + request.inspect_template = inspect_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_inspect_template, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_inspect_template( + self, + request: dlp.UpdateInspectTemplateRequest = None, + *, + name: str = None, + inspect_template: dlp.InspectTemplate = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (:class:`~.dlp.UpdateInspectTemplateRequest`): + The request object. Request message for + UpdateInspectTemplate. + name (:class:`str`): + Required. Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (:class:`~.dlp.InspectTemplate`): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Mask to control which fields get + updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name, inspect_template, update_mask]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.UpdateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_inspect_template, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_inspect_template( + self, + request: dlp.GetInspectTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. 
+ + Args: + request (:class:`~.dlp.GetInspectTemplateRequest`): + The request object. Request message for + GetInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.GetInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_inspect_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_inspect_templates( + self, + request: dlp.ListInspectTemplatesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInspectTemplatesAsyncPager: + r"""Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (:class:`~.dlp.ListInspectTemplatesRequest`): + The request object. Request message for + ListInspectTemplates. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListInspectTemplatesAsyncPager: + Response message for + ListInspectTemplates. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.ListInspectTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_inspect_templates, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInspectTemplatesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_inspect_template( + self, + request: dlp.DeleteInspectTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an InspectTemplate. 
+ See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (:class:`~.dlp.DeleteInspectTemplateRequest`): + The request object. Request message for + DeleteInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.DeleteInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_inspect_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def create_deidentify_template( + self, + request: dlp.CreateDeidentifyTemplateRequest = None, + *, + parent: str = None, + deidentify_template: dlp.DeidentifyTemplate = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for re-using frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates- + deid to learn more. + + Args: + request (:class:`~.dlp.CreateDeidentifyTemplateRequest`): + The request object. Request message for + CreateDeidentifyTemplate. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`~.dlp.DeidentifyTemplate`): + Required. The DeidentifyTemplate to + create. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. 
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, deidentify_template]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.CreateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if deidentify_template is not None: + request.deidentify_template = deidentify_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_deidentify_template, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_deidentify_template( + self, + request: dlp.UpdateDeidentifyTemplateRequest = None, + *, + name: str = None, + deidentify_template: dlp.DeidentifyTemplate = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (:class:`~.dlp.UpdateDeidentifyTemplateRequest`): + The request object. Request message for + UpdateDeidentifyTemplate. + name (:class:`str`): + Required. 
Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`~.dlp.DeidentifyTemplate`): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Mask to control which fields get + updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name, deidentify_template, update_mask]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.UpdateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_deidentify_template, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_deidentify_template( + self, + request: dlp.GetDeidentifyTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (:class:`~.dlp.GetDeidentifyTemplateRequest`): + The request object. Request message for + GetDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.GetDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_deidentify_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_deidentify_templates( + self, + request: dlp.ListDeidentifyTemplatesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeidentifyTemplatesAsyncPager: + r"""Lists DeidentifyTemplates. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. 
+ + Args: + request (:class:`~.dlp.ListDeidentifyTemplatesRequest`): + The request object. Request message for + ListDeidentifyTemplates. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDeidentifyTemplatesAsyncPager: + Response message for + ListDeidentifyTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.ListDeidentifyTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_deidentify_templates, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeidentifyTemplatesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_deidentify_template( + self, + request: dlp.DeleteDeidentifyTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (:class:`~.dlp.DeleteDeidentifyTemplateRequest`): + The request object. Request message for + DeleteDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.DeleteDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_deidentify_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def create_job_trigger( + self, + request: dlp.CreateJobTriggerRequest = None, + *, + parent: str = None, + job_trigger: dlp.JobTrigger = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Args: + request (:class:`~.dlp.CreateJobTriggerRequest`): + The request object. 
Request message for + CreateJobTrigger. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`~.dlp.JobTrigger`): + Required. The JobTrigger to create. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts- + job-triggers to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, job_trigger]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.CreateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job_trigger, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def update_job_trigger( + self, + request: dlp.UpdateJobTriggerRequest = None, + *, + name: str = None, + job_trigger: dlp.JobTrigger = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Args: + request (:class:`~.dlp.UpdateJobTriggerRequest`): + The request object. Request message for + UpdateJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`~.dlp.JobTrigger`): + New JobTrigger value. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Mask to control which fields get + updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts- + job-triggers to learn more. 
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name, job_trigger, update_mask]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.UpdateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if job_trigger is not None: + request.job_trigger = job_trigger + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_job_trigger, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def hybrid_inspect_job_trigger( + self, + request: dlp.HybridInspectJobTriggerRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. 
+ + Args: + request (:class:`~.dlp.HybridInspectJobTriggerRequest`): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.HybridInspectJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.hybrid_inspect_job_trigger, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def get_job_trigger( + self, + request: dlp.GetJobTriggerRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Args: + request (:class:`~.dlp.GetJobTriggerRequest`): + The request object. Request message for GetJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts- + job-triggers to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.GetJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_job_triggers( + self, + request: dlp.ListJobTriggersRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTriggersAsyncPager: + r"""Lists job triggers. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Args: + request (:class:`~.dlp.ListJobTriggersRequest`): + The request object. Request message for ListJobTriggers. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListJobTriggersAsyncPager: + Response message for ListJobTriggers. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.ListJobTriggersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_job_triggers, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobTriggersAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_job_trigger( + self, + request: dlp.DeleteJobTriggerRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Args: + request (:class:`~.dlp.DeleteJobTriggerRequest`): + The request object. Request message for + DeleteJobTrigger. 
+ name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.DeleteJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def activate_job_trigger( + self, + request: dlp.ActivateJobTriggerRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + Args: + request (:class:`~.dlp.ActivateJobTriggerRequest`): + The request object. Request message for + ActivateJobTrigger. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + + request = dlp.ActivateJobTriggerRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.activate_job_trigger, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def create_dlp_job( + self, + request: dlp.CreateDlpJobRequest = None, + *, + parent: str = None, + inspect_job: dlp.InspectJobConfig = None, + risk_job: dlp.RiskAnalysisJobConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Args: + request (:class:`~.dlp.CreateDlpJobRequest`): + The request object. Request message for + CreateDlpJobRequest. Used to initiate long running jobs + such as calculating risk metrics or inspecting Google + Cloud Storage. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_job (:class:`~.dlp.InspectJobConfig`): + Set to control what and how to + inspect. + This corresponds to the ``inspect_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + risk_job (:class:`~.dlp.RiskAnalysisJobConfig`): + Set to choose what metric to + calculate. + This corresponds to the ``risk_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, inspect_job, risk_job]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.CreateDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if inspect_job is not None: + request.inspect_job = inspect_job + if risk_job is not None: + request.risk_job = risk_job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_dlp_job, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_dlp_jobs( + self, + request: dlp.ListDlpJobsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDlpJobsAsyncPager: + r"""Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Args: + request (:class:`~.dlp.ListDlpJobsRequest`): + The request object. The request message for listing DLP + jobs. 
+ parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDlpJobsAsyncPager: + The response message for listing DLP + jobs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.ListDlpJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_dlp_jobs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDlpJobsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_dlp_job( + self, + request: dlp.GetDlpJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Args: + request (:class:`~.dlp.GetDlpJobRequest`): + The request object. The request message for + [DlpJobs.GetDlpJob][]. + name (:class:`str`): + Required. The name of the DlpJob + resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.GetDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_dlp_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_dlp_job( + self, + request: dlp.DeleteDlpJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be cancelled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Args: + request (:class:`~.dlp.DeleteDlpJobRequest`): + The request object. The request message for deleting a + DLP job. + name (:class:`str`): + Required. The name of the DlpJob + resource to be deleted. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.DeleteDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_dlp_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def cancel_dlp_job( + self, + request: dlp.CancelDlpJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Args: + request (:class:`~.dlp.CancelDlpJobRequest`): + The request object. The request message for canceling a + DLP job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + request = dlp.CancelDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.cancel_dlp_job, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def create_stored_info_type( + self, + request: dlp.CreateStoredInfoTypeRequest = None, + *, + parent: str = None, + config: dlp.StoredInfoTypeConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (:class:`~.dlp.CreateStoredInfoTypeRequest`): + The request object. Request message for + CreateStoredInfoType. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (:class:`~.dlp.StoredInfoTypeConfig`): + Required. Configuration of the + storedInfoType to create. 
+ This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent, config]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.CreateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if config is not None: + request.config = config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_stored_info_type, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + async def update_stored_info_type( + self, + request: dlp.UpdateStoredInfoTypeRequest = None, + *, + name: str = None, + config: dlp.StoredInfoTypeConfig = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (:class:`~.dlp.UpdateStoredInfoTypeRequest`): + The request object. Request message for + UpdateStoredInfoType. + name (:class:`str`): + Required. Resource name of organization and + storedInfoType to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (:class:`~.dlp.StoredInfoTypeConfig`): + Updated configuration for the + storedInfoType. If not provided, a new + version of the storedInfoType will be + created with the existing configuration. + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Mask to control which fields get + updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name, config, update_mask]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.UpdateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if config is not None: + request.config = config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_stored_info_type, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def get_stored_info_type( + self, + request: dlp.GetStoredInfoTypeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Gets a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (:class:`~.dlp.GetStoredInfoTypeRequest`): + The request object. Request message for + GetStoredInfoType. + name (:class:`str`): + Required. 
Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.GetStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_stored_info_type, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_stored_info_types( + self, + request: dlp.ListStoredInfoTypesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListStoredInfoTypesAsyncPager: + r"""Lists stored infoTypes. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (:class:`~.dlp.ListStoredInfoTypesRequest`): + The request object. Request message for + ListStoredInfoTypes. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListStoredInfoTypesAsyncPager: + Response message for + ListStoredInfoTypes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([parent]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + request = dlp.ListStoredInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_stored_info_types, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListStoredInfoTypesAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_stored_info_type( + self, + request: dlp.DeleteStoredInfoTypeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (:class:`~.dlp.DeleteStoredInfoTypeRequest`): + The request object. Request message for + DeleteStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. 
+ This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.DeleteStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_stored_info_type, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def hybrid_inspect_dlp_job( + self, + request: dlp.HybridInspectDlpJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a job. 
+ To review the findings inspect the job. Inspection will + occur asynchronously. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Args: + request (:class:`~.dlp.HybridInspectDlpJobRequest`): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + if request is not None and any([name]): + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + request = dlp.HybridInspectDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def finish_dlp_job( + self, + request: dlp.FinishDlpJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. Early access feature is in a pre- + release state and might change or have limited support. + For more information, see + https://cloud.google.com/products#product-launch-stages. + + Args: + request (:class:`~.dlp.FinishDlpJobRequest`): + The request object. The request message for finishing a + DLP hybrid job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + request = dlp.FinishDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.finish_dlp_job, + default_timeout=300.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. 
+ await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dlp",).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DlpServiceAsyncClient",) diff --git a/google/cloud/dlp_v2/services/dlp_service/client.py b/google/cloud/dlp_v2/services/dlp_service/client.py new file mode 100644 index 00000000..b87b761f --- /dev/null +++ b/google/cloud/dlp_v2/services/dlp_service/client.py @@ -0,0 +1,2951 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from collections import OrderedDict +import os +import re +from typing import Callable, Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.types import dlp +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + +from .transports.base import DlpServiceTransport +from .transports.grpc import DlpServiceGrpcTransport +from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport + + +class DlpServiceClientMeta(type): + """Metaclass for the DlpService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] + _transport_registry["grpc"] = DlpServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport + + def get_transport_class(cls, label: str = None,) -> Type[DlpServiceTransport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. 
+ if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DlpServiceClient(metaclass=DlpServiceClientMeta): + """The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in user- + supplied, unstructured data streams, like text blocks or images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dlp.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. 
+ + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + {@api.name}: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @staticmethod + def deidentify_template_path(organization: str, deidentify_template: str,) -> str: + """Return a fully-qualified deidentify_template string.""" + return "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format( + organization=organization, deidentify_template=deidentify_template, + ) + + @staticmethod + def parse_deidentify_template_path(path: str) -> Dict[str, str]: + """Parse a deidentify_template path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/deidentifyTemplates/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def inspect_template_path(organization: str, inspect_template: str,) -> str: + """Return a fully-qualified inspect_template string.""" + return "organizations/{organization}/inspectTemplates/{inspect_template}".format( + organization=organization, inspect_template=inspect_template, + ) + + @staticmethod + def parse_inspect_template_path(path: str) -> Dict[str, str]: + """Parse a inspect_template path into its component segments.""" + m = re.match( + r"^organizations/(?P.+?)/inspectTemplates/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def job_trigger_path(project: str, job_trigger: str,) -> str: + """Return a fully-qualified job_trigger string.""" + return "projects/{project}/jobTriggers/{job_trigger}".format( + project=project, job_trigger=job_trigger, + ) + + @staticmethod + def parse_job_trigger_path(path: str) -> Dict[str, str]: + """Parse a job_trigger path 
into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/jobTriggers/(?P.+?)$", path + ) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, DlpServiceTransport] = None, + client_options: ClientOptions = None, + ) -> None: + """Instantiate the dlp service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DlpServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + if client_options.api_endpoint is None: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + if use_mtls_env == "never": + client_options.api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + has_client_cert_source = ( + client_options.client_cert_source is not None + or mtls.has_default_client_cert_source() + ) + client_options.api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT + if has_client_cert_source + else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DlpServiceTransport): + # transport is a DlpServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=client_options.api_endpoint, + scopes=client_options.scopes, + api_mtls_endpoint=client_options.api_endpoint, + client_cert_source=client_options.client_cert_source, + quota_project_id=client_options.quota_project_id, + ) + + def inspect_content( + self, + request: dlp.InspectContentRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectContentResponse: + r"""Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + For how to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text, + + Args: + request (:class:`~.dlp.InspectContentRequest`): + The request object. Request to search for potentially + sensitive info in a ContentItem. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectContentResponse: + Results of inspecting an item. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.InspectContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, dlp.InspectContentRequest): + request = dlp.InspectContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.inspect_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def redact_image( + self, + request: dlp.RedactImageRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.RedactImageResponse: + r"""Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive- + data-images to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Args: + request (:class:`~.dlp.RedactImageRequest`): + The request object. Request to search for potentially + sensitive info in an image and redact it by covering it + with a colored rectangle. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.RedactImageResponse: + Results of redacting an image. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.RedactImageRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.RedactImageRequest): + request = dlp.RedactImageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.redact_image] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def deidentify_content( + self, + request: dlp.DeidentifyContentRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive- + data to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Args: + request (:class:`~.dlp.DeidentifyContentRequest`): + The request object. Request to de-identify a list of + items. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. + + """ + # Create or coerce a protobuf request object. 
+ + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeidentifyContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeidentifyContentRequest): + request = dlp.DeidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.deidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def reidentify_content( + self, + request: dlp.ReidentifyContentRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ReidentifyContentResponse: + r"""Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Args: + request (:class:`~.dlp.ReidentifyContentRequest`): + The request object. Request to re-identify an item. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ReidentifyContentResponse: + Results of re-identifying a item. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ReidentifyContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, dlp.ReidentifyContentRequest): + request = dlp.ReidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_info_types( + self, + request: dlp.ListInfoTypesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that the DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Args: + request (:class:`~.dlp.ListInfoTypesRequest`): + The request object. Request for the list of infoTypes. + parent (:class:`str`): + The parent resource name. + + - Format:locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListInfoTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListInfoTypesRequest): + request = dlp.ListInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_info_types] + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_inspect_template( + self, + request: dlp.CreateInspectTemplateRequest = None, + *, + parent: str = None, + inspect_template: dlp.InspectTemplate = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Creates an InspectTemplate for re-using frequently + used configuration for inspecting content, images, and + storage. See https://cloud.google.com/dlp/docs/creating- + templates to learn more. + + Args: + request (:class:`~.dlp.CreateInspectTemplateRequest`): + The request object. Request message for + CreateInspectTemplate. + parent (:class:`str`): + Required. Parent resource name. 
+ + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (:class:`~.dlp.InspectTemplate`): + Required. The InspectTemplate to + create. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, inspect_template]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateInspectTemplateRequest): + request = dlp.CreateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if parent is not None: + request.parent = parent + if inspect_template is not None: + request.inspect_template = inspect_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_inspect_template( + self, + request: dlp.UpdateInspectTemplateRequest = None, + *, + name: str = None, + inspect_template: dlp.InspectTemplate = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (:class:`~.dlp.UpdateInspectTemplateRequest`): + The request object. Request message for + UpdateInspectTemplate. + name (:class:`str`): + Required. Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (:class:`~.dlp.InspectTemplate`): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Mask to control which fields get + updated. 
+ This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, inspect_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateInspectTemplateRequest): + request = dlp.UpdateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_inspect_template( + self, + request: dlp.GetInspectTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (:class:`~.dlp.GetInspectTemplateRequest`): + The request object. Request message for + GetInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetInspectTemplateRequest): + request = dlp.GetInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_inspect_templates( + self, + request: dlp.ListInspectTemplatesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInspectTemplatesPager: + r"""Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (:class:`~.dlp.ListInspectTemplatesRequest`): + The request object. Request message for + ListInspectTemplates. + parent (:class:`str`): + Required. Parent resource name. 
+ + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListInspectTemplatesPager: + Response message for + ListInspectTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListInspectTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListInspectTemplatesRequest): + request = dlp.ListInspectTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_inspect_templates] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListInspectTemplatesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_inspect_template( + self, + request: dlp.DeleteInspectTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (:class:`~.dlp.DeleteInspectTemplateRequest`): + The request object. Request message for + DeleteInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteInspectTemplateRequest): + request = dlp.DeleteInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def create_deidentify_template( + self, + request: dlp.CreateDeidentifyTemplateRequest = None, + *, + parent: str = None, + deidentify_template: dlp.DeidentifyTemplate = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for re-using frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates- + deid to learn more. + + Args: + request (:class:`~.dlp.CreateDeidentifyTemplateRequest`): + The request object. Request message for + CreateDeidentifyTemplate. + parent (:class:`str`): + Required. Parent resource name. 
+ + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`~.dlp.DeidentifyTemplate`): + Required. The DeidentifyTemplate to + create. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deidentify_template]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateDeidentifyTemplateRequest): + request = dlp.CreateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if parent is not None: + request.parent = parent + if deidentify_template is not None: + request.deidentify_template = deidentify_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.create_deidentify_template + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_deidentify_template( + self, + request: dlp.UpdateDeidentifyTemplateRequest = None, + *, + name: str = None, + deidentify_template: dlp.DeidentifyTemplate = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (:class:`~.dlp.UpdateDeidentifyTemplateRequest`): + The request object. Request message for + UpdateDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`~.dlp.DeidentifyTemplate`): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ update_mask (:class:`~.field_mask.FieldMask`): + Mask to control which fields get + updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, deidentify_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): + request = dlp.UpdateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.update_deidentify_template + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_deidentify_template( + self, + request: dlp.GetDeidentifyTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (:class:`~.dlp.GetDeidentifyTemplateRequest`): + The request object. Request message for + GetDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetDeidentifyTemplateRequest): + request = dlp.GetDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_deidentify_templates( + self, + request: dlp.ListDeidentifyTemplatesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeidentifyTemplatesPager: + r"""Lists DeidentifyTemplates. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (:class:`~.dlp.ListDeidentifyTemplatesRequest`): + The request object. Request message for + ListDeidentifyTemplates. + parent (:class:`str`): + Required. Parent resource name. 
+ + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDeidentifyTemplatesPager: + Response message for + ListDeidentifyTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListDeidentifyTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListDeidentifyTemplatesRequest): + request = dlp.ListDeidentifyTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.list_deidentify_templates + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeidentifyTemplatesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_deidentify_template( + self, + request: dlp.DeleteDeidentifyTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (:class:`~.dlp.DeleteDeidentifyTemplateRequest`): + The request object. Request message for + DeleteDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): + request = dlp.DeleteDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.delete_deidentify_template + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def create_job_trigger( + self, + request: dlp.CreateJobTriggerRequest = None, + *, + parent: str = None, + job_trigger: dlp.JobTrigger = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Args: + request (:class:`~.dlp.CreateJobTriggerRequest`): + The request object. Request message for + CreateJobTrigger. + parent (:class:`str`): + Required. Parent resource name. 
+ + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`~.dlp.JobTrigger`): + Required. The JobTrigger to create. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts- + job-triggers to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_trigger]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateJobTriggerRequest): + request = dlp.CreateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_job_trigger( + self, + request: dlp.UpdateJobTriggerRequest = None, + *, + name: str = None, + job_trigger: dlp.JobTrigger = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Args: + request (:class:`~.dlp.UpdateJobTriggerRequest`): + The request object. Request message for + UpdateJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`~.dlp.JobTrigger`): + New JobTrigger value. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Mask to control which fields get + updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts- + job-triggers to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, job_trigger, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateJobTriggerRequest): + request = dlp.UpdateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if job_trigger is not None: + request.job_trigger = job_trigger + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def hybrid_inspect_job_trigger( + self, + request: dlp.HybridInspectJobTriggerRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Args: + request (:class:`~.dlp.HybridInspectJobTriggerRequest`): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.HybridInspectJobTriggerRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.HybridInspectJobTriggerRequest): + request = dlp.HybridInspectJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.hybrid_inspect_job_trigger + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_job_trigger( + self, + request: dlp.GetJobTriggerRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Args: + request (:class:`~.dlp.GetJobTriggerRequest`): + The request object. Request message for GetJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.dlp.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts- + job-triggers to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetJobTriggerRequest): + request = dlp.GetJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_job_triggers( + self, + request: dlp.ListJobTriggersRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTriggersPager: + r"""Lists job triggers. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. 
+ + Args: + request (:class:`~.dlp.ListJobTriggersRequest`): + The request object. Request message for ListJobTriggers. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListJobTriggersPager: + Response message for ListJobTriggers. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListJobTriggersRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListJobTriggersRequest): + request = dlp.ListJobTriggersRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_job_triggers] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobTriggersPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_job_trigger( + self, + request: dlp.DeleteJobTriggerRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Args: + request (:class:`~.dlp.DeleteJobTriggerRequest`): + The request object. Request message for + DeleteJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteJobTriggerRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteJobTriggerRequest): + request = dlp.DeleteJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def activate_job_trigger( + self, + request: dlp.ActivateJobTriggerRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + Args: + request (:class:`~.dlp.ActivateJobTriggerRequest`): + The request object. Request message for + ActivateJobTrigger. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ActivateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, dlp.ActivateJobTriggerRequest): + request = dlp.ActivateJobTriggerRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.activate_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def create_dlp_job( + self, + request: dlp.CreateDlpJobRequest = None, + *, + parent: str = None, + inspect_job: dlp.InspectJobConfig = None, + risk_job: dlp.RiskAnalysisJobConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Args: + request (:class:`~.dlp.CreateDlpJobRequest`): + The request object. Request message for + CreateDlpJobRequest. Used to initiate long running jobs + such as calculating risk metrics or inspecting Google + Cloud Storage. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ inspect_job (:class:`~.dlp.InspectJobConfig`): + Set to control what and how to + inspect. + This corresponds to the ``inspect_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + risk_job (:class:`~.dlp.RiskAnalysisJobConfig`): + Set to choose what metric to + calculate. + This corresponds to the ``risk_job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, inspect_job, risk_job]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateDlpJobRequest): + request = dlp.CreateDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if inspect_job is not None: + request.inspect_job = inspect_job + if risk_job is not None: + request.risk_job = risk_job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.create_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_dlp_jobs( + self, + request: dlp.ListDlpJobsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDlpJobsPager: + r"""Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Args: + request (:class:`~.dlp.ListDlpJobsRequest`): + The request object. The request message for listing DLP + jobs. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDlpJobsPager: + The response message for listing DLP + jobs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListDlpJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListDlpJobsRequest): + request = dlp.ListDlpJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_dlp_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDlpJobsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_dlp_job( + self, + request: dlp.GetDlpJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Args: + request (:class:`~.dlp.GetDlpJobRequest`): + The request object. The request message for + [DlpJobs.GetDlpJob][]. + name (:class:`str`): + Required. 
The name of the DlpJob + resource. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetDlpJobRequest): + request = dlp.GetDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def delete_dlp_job( + self, + request: dlp.DeleteDlpJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be cancelled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Args: + request (:class:`~.dlp.DeleteDlpJobRequest`): + The request object. The request message for deleting a + DLP job. + name (:class:`str`): + Required. The name of the DlpJob + resource to be deleted. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteDlpJobRequest): + request = dlp.DeleteDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def cancel_dlp_job( + self, + request: dlp.CancelDlpJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Args: + request (:class:`~.dlp.CancelDlpJobRequest`): + The request object. The request message for canceling a + DLP job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CancelDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CancelDlpJobRequest): + request = dlp.CancelDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.cancel_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def create_stored_info_type( + self, + request: dlp.CreateStoredInfoTypeRequest = None, + *, + parent: str = None, + config: dlp.StoredInfoTypeConfig = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (:class:`~.dlp.CreateStoredInfoTypeRequest`): + The request object. Request message for + CreateStoredInfoType. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (:class:`~.dlp.StoredInfoTypeConfig`): + Required. Configuration of the + storedInfoType to create. + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. 
+ + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, config]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateStoredInfoTypeRequest): + request = dlp.CreateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + if config is not None: + request.config = config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def update_stored_info_type( + self, + request: dlp.UpdateStoredInfoTypeRequest = None, + *, + name: str = None, + config: dlp.StoredInfoTypeConfig = None, + update_mask: field_mask.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. 
See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (:class:`~.dlp.UpdateStoredInfoTypeRequest`): + The request object. Request message for + UpdateStoredInfoType. + name (:class:`str`): + Required. Resource name of organization and + storedInfoType to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (:class:`~.dlp.StoredInfoTypeConfig`): + Updated configuration for the + storedInfoType. If not provided, a new + version of the storedInfoType will be + created with the existing configuration. + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`~.field_mask.FieldMask`): + Mask to control which fields get + updated. + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): + request = dlp.UpdateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + if config is not None: + request.config = config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def get_stored_info_type( + self, + request: dlp.GetStoredInfoTypeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Gets a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (:class:`~.dlp.GetStoredInfoTypeRequest`): + The request object. Request message for + GetStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetStoredInfoTypeRequest): + request = dlp.GetStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def list_stored_info_types( + self, + request: dlp.ListStoredInfoTypesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListStoredInfoTypesPager: + r"""Lists stored infoTypes. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (:class:`~.dlp.ListStoredInfoTypesRequest`): + The request object. Request message for + ListStoredInfoTypes. + parent (:class:`str`): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListStoredInfoTypesPager: + Response message for + ListStoredInfoTypes. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListStoredInfoTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, dlp.ListStoredInfoTypesRequest): + request = dlp.ListStoredInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_stored_info_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListStoredInfoTypesPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_stored_info_type( + self, + request: dlp.DeleteStoredInfoTypeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (:class:`~.dlp.DeleteStoredInfoTypeRequest`): + The request object. Request message for + DeleteStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): + request = dlp.DeleteStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def hybrid_inspect_dlp_job( + self, + request: dlp.HybridInspectDlpJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a job. + To review the findings inspect the job. Inspection will + occur asynchronously. + Early access feature is in a pre-release state and might + change or have limited support. 
For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Args: + request (:class:`~.dlp.HybridInspectDlpJobRequest`): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dlp.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.HybridInspectDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.HybridInspectDlpJobRequest): + request = dlp.HybridInspectDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def finish_dlp_job( + self, + request: dlp.FinishDlpJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. Early access feature is in a pre- + release state and might change or have limited support. + For more information, see + https://cloud.google.com/products#product-launch-stages. + + Args: + request (:class:`~.dlp.FinishDlpJobRequest`): + The request object. The request message for finishing a + DLP hybrid job. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.FinishDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.FinishDlpJobRequest): + request = dlp.FinishDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.finish_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution("google-cloud-dlp",).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DlpServiceClient",) diff --git a/google/cloud/dlp_v2/services/dlp_service/pagers.py b/google/cloud/dlp_v2/services/dlp_service/pagers.py new file mode 100644 index 00000000..0bb564e2 --- /dev/null +++ b/google/cloud/dlp_v2/services/dlp_service/pagers.py @@ -0,0 +1,660 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.cloud.dlp_v2.types import dlp + + +class ListInspectTemplatesPager: + """A pager for iterating through ``list_inspect_templates`` requests. + + This class thinly wraps an initial + :class:`~.dlp.ListInspectTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``inspect_templates`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInspectTemplates`` requests and continue to iterate + through the ``inspect_templates`` field on the + corresponding responses. 
+ + All the usual :class:`~.dlp.ListInspectTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dlp.ListInspectTemplatesResponse], + request: dlp.ListInspectTemplatesRequest, + response: dlp.ListInspectTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.dlp.ListInspectTemplatesRequest`): + The initial request object. + response (:class:`~.dlp.ListInspectTemplatesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListInspectTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[dlp.ListInspectTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dlp.InspectTemplate]: + for page in self.pages: + yield from page.inspect_templates + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListInspectTemplatesAsyncPager: + """A pager for iterating through ``list_inspect_templates`` requests. + + This class thinly wraps an initial + :class:`~.dlp.ListInspectTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``inspect_templates`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListInspectTemplates`` requests and continue to iterate + through the ``inspect_templates`` field on the + corresponding responses. + + All the usual :class:`~.dlp.ListInspectTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[dlp.ListInspectTemplatesResponse]], + request: dlp.ListInspectTemplatesRequest, + response: dlp.ListInspectTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.dlp.ListInspectTemplatesRequest`): + The initial request object. + response (:class:`~.dlp.ListInspectTemplatesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = dlp.ListInspectTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[dlp.ListInspectTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dlp.InspectTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.inspect_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeidentifyTemplatesPager: + """A pager for iterating through ``list_deidentify_templates`` requests. + + This class thinly wraps an initial + :class:`~.dlp.ListDeidentifyTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deidentify_templates`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeidentifyTemplates`` requests and continue to iterate + through the ``deidentify_templates`` field on the + corresponding responses. + + All the usual :class:`~.dlp.ListDeidentifyTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dlp.ListDeidentifyTemplatesResponse], + request: dlp.ListDeidentifyTemplatesRequest, + response: dlp.ListDeidentifyTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (:class:`~.dlp.ListDeidentifyTemplatesRequest`): + The initial request object. + response (:class:`~.dlp.ListDeidentifyTemplatesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dlp.DeidentifyTemplate]: + for page in self.pages: + yield from page.deidentify_templates + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeidentifyTemplatesAsyncPager: + """A pager for iterating through ``list_deidentify_templates`` requests. + + This class thinly wraps an initial + :class:`~.dlp.ListDeidentifyTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deidentify_templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeidentifyTemplates`` requests and continue to iterate + through the ``deidentify_templates`` field on the + corresponding responses. + + All the usual :class:`~.dlp.ListDeidentifyTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[dlp.ListDeidentifyTemplatesResponse]], + request: dlp.ListDeidentifyTemplatesRequest, + response: dlp.ListDeidentifyTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.dlp.ListDeidentifyTemplatesRequest`): + The initial request object. + response (:class:`~.dlp.ListDeidentifyTemplatesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dlp.DeidentifyTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.deidentify_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListJobTriggersPager: + """A pager for iterating through ``list_job_triggers`` requests. + + This class thinly wraps an initial + :class:`~.dlp.ListJobTriggersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``job_triggers`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. + + All the usual :class:`~.dlp.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dlp.ListJobTriggersResponse], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.dlp.ListJobTriggersRequest`): + The initial request object. + response (:class:`~.dlp.ListJobTriggersResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dlp.JobTrigger]: + for page in self.pages: + yield from page.job_triggers + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListJobTriggersAsyncPager: + """A pager for iterating through ``list_job_triggers`` requests. 
+ + This class thinly wraps an initial + :class:`~.dlp.ListJobTriggersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``job_triggers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. + + All the usual :class:`~.dlp.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[dlp.ListJobTriggersResponse]], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.dlp.ListJobTriggersRequest`): + The initial request object. + response (:class:`~.dlp.ListJobTriggersResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dlp.JobTrigger]: + async def async_generator(): + async for page in self.pages: + for response in page.job_triggers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDlpJobsPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`~.dlp.ListDlpJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`~.dlp.ListDlpJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dlp.ListDlpJobsResponse], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.dlp.ListDlpJobsRequest`): + The initial request object. + response (:class:`~.dlp.ListDlpJobsResponse`): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dlp.DlpJob]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDlpJobsAsyncPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`~.dlp.ListDlpJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`~.dlp.ListDlpJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[dlp.ListDlpJobsResponse]], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.dlp.ListDlpJobsRequest`): + The initial request object. + response (:class:`~.dlp.ListDlpJobsResponse`): + The initial response object. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dlp.DlpJob]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`~.dlp.ListStoredInfoTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``stored_info_types`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. + + All the usual :class:`~.dlp.ListStoredInfoTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dlp.ListStoredInfoTypesResponse], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.dlp.ListStoredInfoTypesRequest`): + The initial request object. + response (:class:`~.dlp.ListStoredInfoTypesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dlp.StoredInfoType]: + for page in self.pages: + yield from page.stored_info_types + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesAsyncPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`~.dlp.ListStoredInfoTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``stored_info_types`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. + + All the usual :class:`~.dlp.ListStoredInfoTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[dlp.ListStoredInfoTypesResponse]], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.dlp.ListStoredInfoTypesRequest`): + The initial request object. + response (:class:`~.dlp.ListStoredInfoTypesResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dlp.StoredInfoType]: + async def async_generator(): + async for page in self.pages: + for response in page.stored_info_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py new file mode 100644 index 00000000..983401ce --- /dev/null +++ b/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import DlpServiceTransport +from .grpc import DlpServiceGrpcTransport +from .grpc_asyncio import DlpServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] +_transport_registry["grpc"] = DlpServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport + + +__all__ = ( + "DlpServiceTransport", + "DlpServiceGrpcTransport", + "DlpServiceGrpcAsyncIOTransport", +) diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/google/cloud/dlp_v2/services/dlp_service/transports/base.py new file mode 100644 index 00000000..9a9978f1 --- /dev/null +++ b/google/cloud/dlp_v2/services/dlp_service/transports/base.py @@ -0,0 +1,763 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+
+import abc
+import typing
+import pkg_resources
+
+from google import auth
+from google.api_core import exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials  # type: ignore
+
+from google.cloud.dlp_v2.types import dlp
+from google.protobuf import empty_pb2 as empty  # type: ignore
+
+
+try:
+    _client_info = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution("google-cloud-dlp",).version,
+    )
+except pkg_resources.DistributionNotFound:
+    _client_info = gapic_v1.client_info.ClientInfo()
+
+
+class DlpServiceTransport(abc.ABC):
+    """Abstract transport class for DlpService."""
+
+    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
+
+    def __init__(
+        self,
+        *,
+        host: str = "dlp.googleapis.com",
+        credentials: credentials.Credentials = None,
+        credentials_file: typing.Optional[str] = None,
+        scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
+        quota_project_id: typing.Optional[str] = None,
+        **kwargs,
+    ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]): The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host: + host += ":443" + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages() + + def _prep_wrapped_messages(self): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.inspect_content: gapic_v1.method.wrap_method( + self.inspect_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.redact_image: gapic_v1.method.wrap_method( + self.redact_image, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.deidentify_content: gapic_v1.method.wrap_method( + self.deidentify_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.reidentify_content: gapic_v1.method.wrap_method( + self.reidentify_content, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + 
predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.list_info_types: gapic_v1.method.wrap_method( + self.list_info_types, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.create_inspect_template: gapic_v1.method.wrap_method( + self.create_inspect_template, + default_timeout=300.0, + client_info=_client_info, + ), + self.update_inspect_template: gapic_v1.method.wrap_method( + self.update_inspect_template, + default_timeout=300.0, + client_info=_client_info, + ), + self.get_inspect_template: gapic_v1.method.wrap_method( + self.get_inspect_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.list_inspect_templates: gapic_v1.method.wrap_method( + self.list_inspect_templates, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.delete_inspect_template: gapic_v1.method.wrap_method( + self.delete_inspect_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.create_deidentify_template: gapic_v1.method.wrap_method( + self.create_deidentify_template, + default_timeout=300.0, + client_info=_client_info, + ), + 
self.update_deidentify_template: gapic_v1.method.wrap_method( + self.update_deidentify_template, + default_timeout=300.0, + client_info=_client_info, + ), + self.get_deidentify_template: gapic_v1.method.wrap_method( + self.get_deidentify_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.list_deidentify_templates: gapic_v1.method.wrap_method( + self.list_deidentify_templates, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.delete_deidentify_template: gapic_v1.method.wrap_method( + self.delete_deidentify_template, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.create_job_trigger: gapic_v1.method.wrap_method( + self.create_job_trigger, + default_timeout=300.0, + client_info=_client_info, + ), + self.update_job_trigger: gapic_v1.method.wrap_method( + self.update_job_trigger, + default_timeout=300.0, + client_info=_client_info, + ), + self.hybrid_inspect_job_trigger: gapic_v1.method.wrap_method( + self.hybrid_inspect_job_trigger, + default_timeout=300.0, + client_info=_client_info, + ), + self.get_job_trigger: gapic_v1.method.wrap_method( + self.get_job_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.list_job_triggers: 
gapic_v1.method.wrap_method( + self.list_job_triggers, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.delete_job_trigger: gapic_v1.method.wrap_method( + self.delete_job_trigger, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.activate_job_trigger: gapic_v1.method.wrap_method( + self.activate_job_trigger, + default_timeout=300.0, + client_info=_client_info, + ), + self.create_dlp_job: gapic_v1.method.wrap_method( + self.create_dlp_job, default_timeout=300.0, client_info=_client_info, + ), + self.list_dlp_jobs: gapic_v1.method.wrap_method( + self.list_dlp_jobs, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.get_dlp_job: gapic_v1.method.wrap_method( + self.get_dlp_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.delete_dlp_job: gapic_v1.method.wrap_method( + self.delete_dlp_job, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.cancel_dlp_job: gapic_v1.method.wrap_method( + self.cancel_dlp_job, default_timeout=300.0, client_info=_client_info, + 
), + self.create_stored_info_type: gapic_v1.method.wrap_method( + self.create_stored_info_type, + default_timeout=300.0, + client_info=_client_info, + ), + self.update_stored_info_type: gapic_v1.method.wrap_method( + self.update_stored_info_type, + default_timeout=300.0, + client_info=_client_info, + ), + self.get_stored_info_type: gapic_v1.method.wrap_method( + self.get_stored_info_type, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.list_stored_info_types: gapic_v1.method.wrap_method( + self.list_stored_info_types, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.delete_stored_info_type: gapic_v1.method.wrap_method( + self.delete_stored_info_type, + default_retry=retries.Retry( + initial=0.1, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + exceptions.DeadlineExceeded, exceptions.ServiceUnavailable, + ), + ), + default_timeout=300.0, + client_info=_client_info, + ), + self.hybrid_inspect_dlp_job: gapic_v1.method.wrap_method( + self.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=_client_info, + ), + self.finish_dlp_job: gapic_v1.method.wrap_method( + self.finish_dlp_job, default_timeout=300.0, client_info=_client_info, + ), + } + + @property + def inspect_content( + self, + ) -> typing.Callable[ + [dlp.InspectContentRequest], + typing.Union[ + dlp.InspectContentResponse, typing.Awaitable[dlp.InspectContentResponse] + ], + ]: + raise NotImplementedError() + + @property + def redact_image( + self, + ) -> typing.Callable[ + [dlp.RedactImageRequest], + typing.Union[ + dlp.RedactImageResponse, 
typing.Awaitable[dlp.RedactImageResponse] + ], + ]: + raise NotImplementedError() + + @property + def deidentify_content( + self, + ) -> typing.Callable[ + [dlp.DeidentifyContentRequest], + typing.Union[ + dlp.DeidentifyContentResponse, + typing.Awaitable[dlp.DeidentifyContentResponse], + ], + ]: + raise NotImplementedError() + + @property + def reidentify_content( + self, + ) -> typing.Callable[ + [dlp.ReidentifyContentRequest], + typing.Union[ + dlp.ReidentifyContentResponse, + typing.Awaitable[dlp.ReidentifyContentResponse], + ], + ]: + raise NotImplementedError() + + @property + def list_info_types( + self, + ) -> typing.Callable[ + [dlp.ListInfoTypesRequest], + typing.Union[ + dlp.ListInfoTypesResponse, typing.Awaitable[dlp.ListInfoTypesResponse] + ], + ]: + raise NotImplementedError() + + @property + def create_inspect_template( + self, + ) -> typing.Callable[ + [dlp.CreateInspectTemplateRequest], + typing.Union[dlp.InspectTemplate, typing.Awaitable[dlp.InspectTemplate]], + ]: + raise NotImplementedError() + + @property + def update_inspect_template( + self, + ) -> typing.Callable[ + [dlp.UpdateInspectTemplateRequest], + typing.Union[dlp.InspectTemplate, typing.Awaitable[dlp.InspectTemplate]], + ]: + raise NotImplementedError() + + @property + def get_inspect_template( + self, + ) -> typing.Callable[ + [dlp.GetInspectTemplateRequest], + typing.Union[dlp.InspectTemplate, typing.Awaitable[dlp.InspectTemplate]], + ]: + raise NotImplementedError() + + @property + def list_inspect_templates( + self, + ) -> typing.Callable[ + [dlp.ListInspectTemplatesRequest], + typing.Union[ + dlp.ListInspectTemplatesResponse, + typing.Awaitable[dlp.ListInspectTemplatesResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_inspect_template( + self, + ) -> typing.Callable[ + [dlp.DeleteInspectTemplateRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def create_deidentify_template( + self, 
+ ) -> typing.Callable[ + [dlp.CreateDeidentifyTemplateRequest], + typing.Union[dlp.DeidentifyTemplate, typing.Awaitable[dlp.DeidentifyTemplate]], + ]: + raise NotImplementedError() + + @property + def update_deidentify_template( + self, + ) -> typing.Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + typing.Union[dlp.DeidentifyTemplate, typing.Awaitable[dlp.DeidentifyTemplate]], + ]: + raise NotImplementedError() + + @property + def get_deidentify_template( + self, + ) -> typing.Callable[ + [dlp.GetDeidentifyTemplateRequest], + typing.Union[dlp.DeidentifyTemplate, typing.Awaitable[dlp.DeidentifyTemplate]], + ]: + raise NotImplementedError() + + @property + def list_deidentify_templates( + self, + ) -> typing.Callable[ + [dlp.ListDeidentifyTemplatesRequest], + typing.Union[ + dlp.ListDeidentifyTemplatesResponse, + typing.Awaitable[dlp.ListDeidentifyTemplatesResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_deidentify_template( + self, + ) -> typing.Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def create_job_trigger( + self, + ) -> typing.Callable[ + [dlp.CreateJobTriggerRequest], + typing.Union[dlp.JobTrigger, typing.Awaitable[dlp.JobTrigger]], + ]: + raise NotImplementedError() + + @property + def update_job_trigger( + self, + ) -> typing.Callable[ + [dlp.UpdateJobTriggerRequest], + typing.Union[dlp.JobTrigger, typing.Awaitable[dlp.JobTrigger]], + ]: + raise NotImplementedError() + + @property + def hybrid_inspect_job_trigger( + self, + ) -> typing.Callable[ + [dlp.HybridInspectJobTriggerRequest], + typing.Union[ + dlp.HybridInspectResponse, typing.Awaitable[dlp.HybridInspectResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_job_trigger( + self, + ) -> typing.Callable[ + [dlp.GetJobTriggerRequest], + typing.Union[dlp.JobTrigger, typing.Awaitable[dlp.JobTrigger]], + ]: + raise NotImplementedError() 
+ + @property + def list_job_triggers( + self, + ) -> typing.Callable[ + [dlp.ListJobTriggersRequest], + typing.Union[ + dlp.ListJobTriggersResponse, typing.Awaitable[dlp.ListJobTriggersResponse] + ], + ]: + raise NotImplementedError() + + @property + def delete_job_trigger( + self, + ) -> typing.Callable[ + [dlp.DeleteJobTriggerRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def activate_job_trigger( + self, + ) -> typing.Callable[ + [dlp.ActivateJobTriggerRequest], + typing.Union[dlp.DlpJob, typing.Awaitable[dlp.DlpJob]], + ]: + raise NotImplementedError() + + @property + def create_dlp_job( + self, + ) -> typing.Callable[ + [dlp.CreateDlpJobRequest], + typing.Union[dlp.DlpJob, typing.Awaitable[dlp.DlpJob]], + ]: + raise NotImplementedError() + + @property + def list_dlp_jobs( + self, + ) -> typing.Callable[ + [dlp.ListDlpJobsRequest], + typing.Union[ + dlp.ListDlpJobsResponse, typing.Awaitable[dlp.ListDlpJobsResponse] + ], + ]: + raise NotImplementedError() + + @property + def get_dlp_job( + self, + ) -> typing.Callable[ + [dlp.GetDlpJobRequest], typing.Union[dlp.DlpJob, typing.Awaitable[dlp.DlpJob]] + ]: + raise NotImplementedError() + + @property + def delete_dlp_job( + self, + ) -> typing.Callable[ + [dlp.DeleteDlpJobRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def cancel_dlp_job( + self, + ) -> typing.Callable[ + [dlp.CancelDlpJobRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def create_stored_info_type( + self, + ) -> typing.Callable[ + [dlp.CreateStoredInfoTypeRequest], + typing.Union[dlp.StoredInfoType, typing.Awaitable[dlp.StoredInfoType]], + ]: + raise NotImplementedError() + + @property + def update_stored_info_type( + self, + ) -> typing.Callable[ + [dlp.UpdateStoredInfoTypeRequest], + typing.Union[dlp.StoredInfoType, 
typing.Awaitable[dlp.StoredInfoType]], + ]: + raise NotImplementedError() + + @property + def get_stored_info_type( + self, + ) -> typing.Callable[ + [dlp.GetStoredInfoTypeRequest], + typing.Union[dlp.StoredInfoType, typing.Awaitable[dlp.StoredInfoType]], + ]: + raise NotImplementedError() + + @property + def list_stored_info_types( + self, + ) -> typing.Callable[ + [dlp.ListStoredInfoTypesRequest], + typing.Union[ + dlp.ListStoredInfoTypesResponse, + typing.Awaitable[dlp.ListStoredInfoTypesResponse], + ], + ]: + raise NotImplementedError() + + @property + def delete_stored_info_type( + self, + ) -> typing.Callable[ + [dlp.DeleteStoredInfoTypeRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def hybrid_inspect_dlp_job( + self, + ) -> typing.Callable[ + [dlp.HybridInspectDlpJobRequest], + typing.Union[ + dlp.HybridInspectResponse, typing.Awaitable[dlp.HybridInspectResponse] + ], + ]: + raise NotImplementedError() + + @property + def finish_dlp_job( + self, + ) -> typing.Callable[ + [dlp.FinishDlpJobRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + +__all__ = ("DlpServiceTransport",) diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py b/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py new file mode 100644 index 00000000..2605ae97 --- /dev/null +++ b/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py @@ -0,0 +1,1215 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from typing import Callable, Dict, Optional, Sequence, Tuple

from google.api_core import grpc_helpers  # type: ignore
from google import auth  # type: ignore
from google.auth import credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore


import grpc  # type: ignore

from google.cloud.dlp_v2.types import dlp
from google.protobuf import empty_pb2 as empty  # type: ignore

from .base import DlpServiceTransport


class DlpServiceGrpcTransport(DlpServiceTransport):
    """Synchronous gRPC transport for DlpService.

    The Cloud Data Loss Prevention (DLP) API detects Personally
    Identifiable Information (PII) and other privacy-sensitive data in
    user-supplied, unstructured data streams (text blocks, images), and
    offers redaction and scheduled scanning of Google Cloud data sets.
    See https://cloud.google.com/dlp/docs/ for concepts and how-to
    guides.

    This transport exposes the same method surface as the primary
    client; the client loads it and delegates every RPC to it. Protocol
    buffers are sent over gRPC (HTTP/2), so the ``grpcio`` package must
    be installed.
    """

    # Lazily-built cache of per-RPC stub callables, keyed by method name.
    _stubs: Dict[str, Callable]

    def __init__(
        self,
        *,
        host: str = "dlp.googleapis.com",
        credentials: credentials.Credentials = None,
        credentials_file: str = None,
        scopes: Sequence[str] = None,
        channel: grpc.Channel = None,
        api_mtls_endpoint: str = None,
        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
        quota_project_id: Optional[str] = None
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]): The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]):
                Credentials identifying this application to the service.
                If unset, credentials are ascertained from the
                environment. Ignored when ``channel`` is provided.
            credentials_file (Optional[str]): A file loadable with
                :func:`google.auth.load_credentials_from_file`. Ignored
                when ``channel`` is provided.
            scopes (Optional(Sequence[str])): OAuth scopes. Ignored when
                ``channel`` is provided.
            channel (Optional[grpc.Channel]): A ready-made channel to use
                for all calls.
            api_mtls_endpoint (Optional[str]): A mutual-TLS endpoint; when
                given it overrides ``host`` and a mutual TLS channel is
                created using ``client_cert_source`` or application
                default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Callback returning client certificate and private key
                bytes, both PEM. Ignored unless ``api_mtls_endpoint`` is
                set.
            quota_project_id (Optional[str]): Project used for billing and
                quota.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If the mutual
                TLS transport cannot be created.
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """
        if channel:
            # An explicit channel wins. Drop credentials so the base
            # class does not also try to authenticate a channel we did
            # not create (channel and credentials are mutually exclusive).
            credentials = False
            self._grpc_channel = channel
        elif api_mtls_endpoint:
            # Normalize the mTLS endpoint to host:port form.
            if ":" in api_mtls_endpoint:
                host = api_mtls_endpoint
            else:
                host = api_mtls_endpoint + ":443"

            if credentials is None:
                credentials, _ = auth.default(
                    scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
                )

            # Client SSL credentials: caller-supplied cert/key pair, or
            # application default SSL credentials.
            if client_cert_source:
                cert, key = client_cert_source()
                ssl_creds = grpc.ssl_channel_credentials(
                    certificate_chain=cert, private_key=key
                )
            else:
                ssl_creds = SslCredentials().ssl_credentials

            # Build a brand-new mTLS channel; any provided channel is
            # ignored on this path.
            self._grpc_channel = type(self).create_channel(
                host,
                credentials=credentials,
                credentials_file=credentials_file,
                ssl_credentials=ssl_creds,
                scopes=scopes or self.AUTH_SCOPES,
                quota_project_id=quota_project_id,
            )

        self._stubs = {}  # type: Dict[str, Callable]

        # Run the base constructor.
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes or self.AUTH_SCOPES,
            quota_project_id=quota_project_id,
        )

    @classmethod
    def create_channel(
        cls,
        host: str = "dlp.googleapis.com",
        credentials: credentials.Credentials = None,
        credentials_file: str = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs
    ) -> grpc.Channel:
        """Create and return a gRPC channel object.

        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): Credentials identifying
                this application to the service; if none are given the
                client attempts to ascertain them from the environment.
            credentials_file (Optional[str]): A file loadable with
                :func:`google.auth.load_credentials_from_file`. Mutually
                exclusive with ``credentials``.
            scopes (Optional[Sequence[str]]): Scopes needed for this
                service; used only when credentials are not specified and
                passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): Project used for billing and
                quota.
            kwargs (Optional[dict]): Extra keyword arguments forwarded to
                channel creation.

        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are passed.
        """
        # Empty/None scopes fall back to the transport's defaults.
        effective_scopes = scopes or cls.AUTH_SCOPES
        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=effective_scopes,
            quota_project_id=quota_project_id,
            **kwargs
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel used to connect to this service.

        The channel is cached on the instance; repeated accesses return
        the same object.
        """
        # Only build a default channel if the constructor set none
        # (i.e. neither an explicit channel nor the mTLS path ran).
        if not hasattr(self, "_grpc_channel"):
            self._grpc_channel = self.create_channel(
                self._host, credentials=self._credentials,
            )
        return self._grpc_channel

    @property
    def inspect_content(
        self,
    ) -> Callable[[dlp.InspectContentRequest], dlp.InspectContentResponse]:
        r"""Return a callable for the inspect content method over gRPC.

        Finds potentially sensitive info in content, within limits on
        input size, processing time, and output size. When no InfoTypes
        or CustomInfoTypes are specified, the system automatically
        chooses which detectors to run (possibly all; the set may change
        as detectors are updated). See
        https://cloud.google.com/dlp/docs/inspecting-images and
        https://cloud.google.com/dlp/docs/inspecting-text.

        Returns:
            Callable[[~.InspectContentRequest],
                ~.InspectContentResponse]:
                A function that, when called, will call the underlying
                RPC on the server.
        """
        # Build the stub lazily and cache it; gRPC performs the proto
        # (de)serialization via the functions supplied here.
        stub = self._stubs.get("inspect_content")
        if stub is None:
            stub = self.grpc_channel.unary_unary(
                "/google.privacy.dlp.v2.DlpService/InspectContent",
                request_serializer=dlp.InspectContentRequest.serialize,
                response_deserializer=dlp.InspectContentResponse.deserialize,
            )
            self._stubs["inspect_content"] = stub
        return stub

    @property
    def redact_image(
        self,
    ) -> Callable[[dlp.RedactImageRequest], dlp.RedactImageResponse]:
        r"""Return a callable for the redact image method over gRPC.

        Redacts potentially sensitive info from an image.
        This method has limits on input size, processing time,
        and output size. See
        https://cloud.google.com/dlp/docs/redacting-sensitive-
        data-images to learn more.

        When no InfoTypes or CustomInfoTypes are specified in
        this request, the system will automatically choose what
        detectors to run. By default this may be all types, but
        may change over time as detectors are updated.

        Returns:
            Callable[[~.RedactImageRequest],
                ~.RedactImageResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
+ if "redact_image" not in self._stubs: + self._stubs["redact_image"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/RedactImage", + request_serializer=dlp.RedactImageRequest.serialize, + response_deserializer=dlp.RedactImageResponse.deserialize, + ) + return self._stubs["redact_image"] + + @property + def deidentify_content( + self, + ) -> Callable[[dlp.DeidentifyContentRequest], dlp.DeidentifyContentResponse]: + r"""Return a callable for the deidentify content method over gRPC. + + De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive- + data to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.DeidentifyContentRequest], + ~.DeidentifyContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "deidentify_content" not in self._stubs: + self._stubs["deidentify_content"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeidentifyContent", + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs["deidentify_content"] + + @property + def reidentify_content( + self, + ) -> Callable[[dlp.ReidentifyContentRequest], dlp.ReidentifyContentResponse]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. 
See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + ~.ReidentifyContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reidentify_content" not in self._stubs: + self._stubs["reidentify_content"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ReidentifyContent", + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs["reidentify_content"] + + @property + def list_info_types( + self, + ) -> Callable[[dlp.ListInfoTypesRequest], dlp.ListInfoTypesResponse]: + r"""Return a callable for the list info types method over gRPC. + + Returns a list of the sensitive information types + that the DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + ~.ListInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_info_types" not in self._stubs: + self._stubs["list_info_types"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListInfoTypes", + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs["list_info_types"] + + @property + def create_inspect_template( + self, + ) -> Callable[[dlp.CreateInspectTemplateRequest], dlp.InspectTemplate]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for re-using frequently + used configuration for inspecting content, images, and + storage. See https://cloud.google.com/dlp/docs/creating- + templates to learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_inspect_template" not in self._stubs: + self._stubs["create_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateInspectTemplate", + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs["create_inspect_template"] + + @property + def update_inspect_template( + self, + ) -> Callable[[dlp.UpdateInspectTemplateRequest], dlp.InspectTemplate]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.UpdateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_inspect_template" not in self._stubs: + self._stubs["update_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate", + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs["update_inspect_template"] + + @property + def get_inspect_template( + self, + ) -> Callable[[dlp.GetInspectTemplateRequest], dlp.InspectTemplate]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_inspect_template" not in self._stubs: + self._stubs["get_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetInspectTemplate", + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs["get_inspect_template"] + + @property + def list_inspect_templates( + self, + ) -> Callable[[dlp.ListInspectTemplatesRequest], dlp.ListInspectTemplatesResponse]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.ListInspectTemplatesRequest], + ~.ListInspectTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_inspect_templates" not in self._stubs: + self._stubs["list_inspect_templates"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListInspectTemplates", + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs["list_inspect_templates"] + + @property + def delete_inspect_template( + self, + ) -> Callable[[dlp.DeleteInspectTemplateRequest], empty.Empty]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.DeleteInspectTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_inspect_template" not in self._stubs: + self._stubs["delete_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate", + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_inspect_template"] + + @property + def create_deidentify_template( + self, + ) -> Callable[[dlp.CreateDeidentifyTemplateRequest], dlp.DeidentifyTemplate]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for re-using frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates- + deid to learn more. 
+ + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deidentify_template" not in self._stubs: + self._stubs["create_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate", + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs["create_deidentify_template"] + + @property + def update_deidentify_template( + self, + ) -> Callable[[dlp.UpdateDeidentifyTemplateRequest], dlp.DeidentifyTemplate]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_deidentify_template" not in self._stubs: + self._stubs["update_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate", + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs["update_deidentify_template"] + + @property + def get_deidentify_template( + self, + ) -> Callable[[dlp.GetDeidentifyTemplateRequest], dlp.DeidentifyTemplate]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deidentify_template" not in self._stubs: + self._stubs["get_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate", + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs["get_deidentify_template"] + + @property + def list_deidentify_templates( + self, + ) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], dlp.ListDeidentifyTemplatesResponse + ]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + ~.ListDeidentifyTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deidentify_templates" not in self._stubs: + self._stubs["list_deidentify_templates"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates", + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs["list_deidentify_templates"] + + @property + def delete_deidentify_template( + self, + ) -> Callable[[dlp.DeleteDeidentifyTemplateRequest], empty.Empty]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deidentify_template" not in self._stubs: + self._stubs["delete_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate", + request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_deidentify_template"] + + @property + def create_job_trigger( + self, + ) -> Callable[[dlp.CreateJobTriggerRequest], dlp.JobTrigger]: + r"""Return a callable for the create job trigger method over gRPC. + + Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. 
+ + Returns: + Callable[[~.CreateJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_job_trigger" not in self._stubs: + self._stubs["create_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateJobTrigger", + request_serializer=dlp.CreateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs["create_job_trigger"] + + @property + def update_job_trigger( + self, + ) -> Callable[[dlp.UpdateJobTriggerRequest], dlp.JobTrigger]: + r"""Return a callable for the update job trigger method over gRPC. + + Updates a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.UpdateJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_job_trigger" not in self._stubs: + self._stubs["update_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateJobTrigger", + request_serializer=dlp.UpdateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs["update_job_trigger"] + + @property + def hybrid_inspect_job_trigger( + self, + ) -> Callable[[dlp.HybridInspectJobTriggerRequest], dlp.HybridInspectResponse]: + r"""Return a callable for the hybrid inspect job trigger method over gRPC. + + Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. 
To review the findings monitor the jobs + within the trigger. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Returns: + Callable[[~.HybridInspectJobTriggerRequest], + ~.HybridInspectResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "hybrid_inspect_job_trigger" not in self._stubs: + self._stubs["hybrid_inspect_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger", + request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs["hybrid_inspect_job_trigger"] + + @property + def get_job_trigger(self) -> Callable[[dlp.GetJobTriggerRequest], dlp.JobTrigger]: + r"""Return a callable for the get job trigger method over gRPC. + + Gets a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.GetJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_job_trigger" not in self._stubs: + self._stubs["get_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetJobTrigger", + request_serializer=dlp.GetJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs["get_job_trigger"] + + @property + def list_job_triggers( + self, + ) -> Callable[[dlp.ListJobTriggersRequest], dlp.ListJobTriggersResponse]: + r"""Return a callable for the list job triggers method over gRPC. + + Lists job triggers. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.ListJobTriggersRequest], + ~.ListJobTriggersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_job_triggers" not in self._stubs: + self._stubs["list_job_triggers"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListJobTriggers", + request_serializer=dlp.ListJobTriggersRequest.serialize, + response_deserializer=dlp.ListJobTriggersResponse.deserialize, + ) + return self._stubs["list_job_triggers"] + + @property + def delete_job_trigger( + self, + ) -> Callable[[dlp.DeleteJobTriggerRequest], empty.Empty]: + r"""Return a callable for the delete job trigger method over gRPC. + + Deletes a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.DeleteJobTriggerRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_job_trigger" not in self._stubs: + self._stubs["delete_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteJobTrigger", + request_serializer=dlp.DeleteJobTriggerRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_job_trigger"] + + @property + def activate_job_trigger( + self, + ) -> Callable[[dlp.ActivateJobTriggerRequest], dlp.DlpJob]: + r"""Return a callable for the activate job trigger method over gRPC. + + Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + Returns: + Callable[[~.ActivateJobTriggerRequest], + ~.DlpJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "activate_job_trigger" not in self._stubs: + self._stubs["activate_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ActivateJobTrigger", + request_serializer=dlp.ActivateJobTriggerRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs["activate_job_trigger"] + + @property + def create_dlp_job(self) -> Callable[[dlp.CreateDlpJobRequest], dlp.DlpJob]: + r"""Return a callable for the create dlp job method over gRPC. + + Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. 
+ + Returns: + Callable[[~.CreateDlpJobRequest], + ~.DlpJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_dlp_job" not in self._stubs: + self._stubs["create_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateDlpJob", + request_serializer=dlp.CreateDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs["create_dlp_job"] + + @property + def list_dlp_jobs( + self, + ) -> Callable[[dlp.ListDlpJobsRequest], dlp.ListDlpJobsResponse]: + r"""Return a callable for the list dlp jobs method over gRPC. + + Lists DlpJobs that match the specified filter in the + request. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.ListDlpJobsRequest], + ~.ListDlpJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_dlp_jobs" not in self._stubs: + self._stubs["list_dlp_jobs"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListDlpJobs", + request_serializer=dlp.ListDlpJobsRequest.serialize, + response_deserializer=dlp.ListDlpJobsResponse.deserialize, + ) + return self._stubs["list_dlp_jobs"] + + @property + def get_dlp_job(self) -> Callable[[dlp.GetDlpJobRequest], dlp.DlpJob]: + r"""Return a callable for the get dlp job method over gRPC. + + Gets the latest state of a long-running DlpJob. 
+ See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Returns: + Callable[[~.GetDlpJobRequest], + ~.DlpJob]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dlp_job" not in self._stubs: + self._stubs["get_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetDlpJob", + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs["get_dlp_job"] + + @property + def delete_dlp_job(self) -> Callable[[dlp.DeleteDlpJobRequest], empty.Empty]: + r"""Return a callable for the delete dlp job method over gRPC. + + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be cancelled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_dlp_job" not in self._stubs: + self._stubs["delete_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteDlpJob", + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_dlp_job"] + + @property + def cancel_dlp_job(self) -> Callable[[dlp.CancelDlpJobRequest], empty.Empty]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Returns: + Callable[[~.CancelDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_dlp_job" not in self._stubs: + self._stubs["cancel_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CancelDlpJob", + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["cancel_dlp_job"] + + @property + def create_stored_info_type( + self, + ) -> Callable[[dlp.CreateStoredInfoTypeRequest], dlp.StoredInfoType]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_stored_info_type" not in self._stubs: + self._stubs["create_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateStoredInfoType", + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs["create_stored_info_type"] + + @property + def update_stored_info_type( + self, + ) -> Callable[[dlp.UpdateStoredInfoTypeRequest], dlp.StoredInfoType]: + r"""Return a callable for the update stored info type method over gRPC. + + Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_stored_info_type" not in self._stubs: + self._stubs["update_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType", + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs["update_stored_info_type"] + + @property + def get_stored_info_type( + self, + ) -> Callable[[dlp.GetStoredInfoTypeRequest], dlp.StoredInfoType]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. 
+ + Returns: + Callable[[~.GetStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_stored_info_type" not in self._stubs: + self._stubs["get_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetStoredInfoType", + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs["get_stored_info_type"] + + @property + def list_stored_info_types( + self, + ) -> Callable[[dlp.ListStoredInfoTypesRequest], dlp.ListStoredInfoTypesResponse]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.ListStoredInfoTypesRequest], + ~.ListStoredInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_stored_info_types" not in self._stubs: + self._stubs["list_stored_info_types"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes", + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs["list_stored_info_types"] + + @property + def delete_stored_info_type( + self, + ) -> Callable[[dlp.DeleteStoredInfoTypeRequest], empty.Empty]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. 
+ See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_stored_info_type" not in self._stubs: + self._stubs["delete_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType", + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_stored_info_type"] + + @property + def hybrid_inspect_dlp_job( + self, + ) -> Callable[[dlp.HybridInspectDlpJobRequest], dlp.HybridInspectResponse]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. + + Inspect hybrid content and store findings to a job. + To review the findings inspect the job. Inspection will + occur asynchronously. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + ~.HybridInspectResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "hybrid_inspect_dlp_job" not in self._stubs: + self._stubs["hybrid_inspect_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob", + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs["hybrid_inspect_dlp_job"] + + @property + def finish_dlp_job(self) -> Callable[[dlp.FinishDlpJobRequest], empty.Empty]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. Early access feature is in a pre- + release state and might change or have limited support. + For more information, see + https://cloud.google.com/products#product-launch-stages. + + Returns: + Callable[[~.FinishDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "finish_dlp_job" not in self._stubs: + self._stubs["finish_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/FinishDlpJob", + request_serializer=dlp.FinishDlpJobRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["finish_dlp_job"] + + +__all__ = ("DlpServiceGrpcTransport",) diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..e1ab0937 --- /dev/null +++ b/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py @@ -0,0 +1,1237 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import DlpServiceTransport +from .grpc import DlpServiceGrpcTransport + + +class DlpServiceGrpcAsyncIOTransport(DlpServiceTransport): + """gRPC AsyncIO backend transport for DlpService. 
+ + The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in user- + supplied, unstructured data streams, like text blocks or images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "dlp.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "dlp.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. 
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. 
+ if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def inspect_content( + self, + ) -> Callable[[dlp.InspectContentRequest], Awaitable[dlp.InspectContentResponse]]: + r"""Return a callable for the inspect content method over gRPC. + + Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + For how to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text, + + Returns: + Callable[[~.InspectContentRequest], + Awaitable[~.InspectContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "inspect_content" not in self._stubs: + self._stubs["inspect_content"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/InspectContent", + request_serializer=dlp.InspectContentRequest.serialize, + response_deserializer=dlp.InspectContentResponse.deserialize, + ) + return self._stubs["inspect_content"] + + @property + def redact_image( + self, + ) -> Callable[[dlp.RedactImageRequest], Awaitable[dlp.RedactImageResponse]]: + r"""Return a callable for the redact image method over gRPC. + + Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive- + data-images to learn more. 
+ + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.RedactImageRequest], + Awaitable[~.RedactImageResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "redact_image" not in self._stubs: + self._stubs["redact_image"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/RedactImage", + request_serializer=dlp.RedactImageRequest.serialize, + response_deserializer=dlp.RedactImageResponse.deserialize, + ) + return self._stubs["redact_image"] + + @property + def deidentify_content( + self, + ) -> Callable[ + [dlp.DeidentifyContentRequest], Awaitable[dlp.DeidentifyContentResponse] + ]: + r"""Return a callable for the deidentify content method over gRPC. + + De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive- + data to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.DeidentifyContentRequest], + Awaitable[~.DeidentifyContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "deidentify_content" not in self._stubs: + self._stubs["deidentify_content"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeidentifyContent", + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs["deidentify_content"] + + @property + def reidentify_content( + self, + ) -> Callable[ + [dlp.ReidentifyContentRequest], Awaitable[dlp.ReidentifyContentResponse] + ]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + Awaitable[~.ReidentifyContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "reidentify_content" not in self._stubs: + self._stubs["reidentify_content"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ReidentifyContent", + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs["reidentify_content"] + + @property + def list_info_types( + self, + ) -> Callable[[dlp.ListInfoTypesRequest], Awaitable[dlp.ListInfoTypesResponse]]: + r"""Return a callable for the list info types method over gRPC. + + Returns a list of the sensitive information types + that the DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + Awaitable[~.ListInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_info_types" not in self._stubs: + self._stubs["list_info_types"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListInfoTypes", + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs["list_info_types"] + + @property + def create_inspect_template( + self, + ) -> Callable[[dlp.CreateInspectTemplateRequest], Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for re-using frequently + used configuration for inspecting content, images, and + storage. See https://cloud.google.com/dlp/docs/creating- + templates to learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_inspect_template" not in self._stubs: + self._stubs["create_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateInspectTemplate", + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs["create_inspect_template"] + + @property + def update_inspect_template( + self, + ) -> Callable[[dlp.UpdateInspectTemplateRequest], Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. 
+ + Returns: + Callable[[~.UpdateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_inspect_template" not in self._stubs: + self._stubs["update_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate", + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs["update_inspect_template"] + + @property + def get_inspect_template( + self, + ) -> Callable[[dlp.GetInspectTemplateRequest], Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_inspect_template" not in self._stubs: + self._stubs["get_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetInspectTemplate", + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs["get_inspect_template"] + + @property + def list_inspect_templates( + self, + ) -> Callable[ + [dlp.ListInspectTemplatesRequest], Awaitable[dlp.ListInspectTemplatesResponse] + ]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. 
+ See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.ListInspectTemplatesRequest], + Awaitable[~.ListInspectTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_inspect_templates" not in self._stubs: + self._stubs["list_inspect_templates"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListInspectTemplates", + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs["list_inspect_templates"] + + @property + def delete_inspect_template( + self, + ) -> Callable[[dlp.DeleteInspectTemplateRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.DeleteInspectTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_inspect_template" not in self._stubs: + self._stubs["delete_inspect_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate", + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_inspect_template"] + + @property + def create_deidentify_template( + self, + ) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], Awaitable[dlp.DeidentifyTemplate] + ]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for re-using frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates- + deid to learn more. + + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deidentify_template" not in self._stubs: + self._stubs["create_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate", + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs["create_deidentify_template"] + + @property + def update_deidentify_template( + self, + ) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], Awaitable[dlp.DeidentifyTemplate] + ]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. 
+ + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_deidentify_template" not in self._stubs: + self._stubs["update_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate", + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs["update_deidentify_template"] + + @property + def get_deidentify_template( + self, + ) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], Awaitable[dlp.DeidentifyTemplate] + ]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_deidentify_template" not in self._stubs: + self._stubs["get_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate", + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs["get_deidentify_template"] + + @property + def list_deidentify_templates( + self, + ) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + Awaitable[dlp.ListDeidentifyTemplatesResponse], + ]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + Awaitable[~.ListDeidentifyTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deidentify_templates" not in self._stubs: + self._stubs["list_deidentify_templates"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates", + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs["list_deidentify_templates"] + + @property + def delete_deidentify_template( + self, + ) -> Callable[[dlp.DeleteDeidentifyTemplateRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deidentify_template" not in self._stubs: + self._stubs["delete_deidentify_template"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate", + request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_deidentify_template"] + + @property + def create_job_trigger( + self, + ) -> Callable[[dlp.CreateJobTriggerRequest], Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the create job trigger method over gRPC. + + Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.CreateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_job_trigger" not in self._stubs: + self._stubs["create_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateJobTrigger", + request_serializer=dlp.CreateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs["create_job_trigger"] + + @property + def update_job_trigger( + self, + ) -> Callable[[dlp.UpdateJobTriggerRequest], Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the update job trigger method over gRPC. + + Updates a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. 
+ + Returns: + Callable[[~.UpdateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_job_trigger" not in self._stubs: + self._stubs["update_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateJobTrigger", + request_serializer=dlp.UpdateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs["update_job_trigger"] + + @property + def hybrid_inspect_job_trigger( + self, + ) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], Awaitable[dlp.HybridInspectResponse] + ]: + r"""Return a callable for the hybrid inspect job trigger method over gRPC. + + Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Returns: + Callable[[~.HybridInspectJobTriggerRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "hybrid_inspect_job_trigger" not in self._stubs: + self._stubs["hybrid_inspect_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger", + request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs["hybrid_inspect_job_trigger"] + + @property + def get_job_trigger( + self, + ) -> Callable[[dlp.GetJobTriggerRequest], Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the get job trigger method over gRPC. + + Gets a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.GetJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_job_trigger" not in self._stubs: + self._stubs["get_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetJobTrigger", + request_serializer=dlp.GetJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs["get_job_trigger"] + + @property + def list_job_triggers( + self, + ) -> Callable[[dlp.ListJobTriggersRequest], Awaitable[dlp.ListJobTriggersResponse]]: + r"""Return a callable for the list job triggers method over gRPC. + + Lists job triggers. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.ListJobTriggersRequest], + Awaitable[~.ListJobTriggersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_job_triggers" not in self._stubs: + self._stubs["list_job_triggers"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListJobTriggers", + request_serializer=dlp.ListJobTriggersRequest.serialize, + response_deserializer=dlp.ListJobTriggersResponse.deserialize, + ) + return self._stubs["list_job_triggers"] + + @property + def delete_job_trigger( + self, + ) -> Callable[[dlp.DeleteJobTriggerRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete job trigger method over gRPC. + + Deletes a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.DeleteJobTriggerRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_job_trigger" not in self._stubs: + self._stubs["delete_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteJobTrigger", + request_serializer=dlp.DeleteJobTriggerRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_job_trigger"] + + @property + def activate_job_trigger( + self, + ) -> Callable[[dlp.ActivateJobTriggerRequest], Awaitable[dlp.DlpJob]]: + r"""Return a callable for the activate job trigger method over gRPC. + + Activate a job trigger. Causes the immediate execute + of a trigger instead of waiting on the trigger event to + occur. + + Returns: + Callable[[~.ActivateJobTriggerRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "activate_job_trigger" not in self._stubs: + self._stubs["activate_job_trigger"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ActivateJobTrigger", + request_serializer=dlp.ActivateJobTriggerRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs["activate_job_trigger"] + + @property + def create_dlp_job( + self, + ) -> Callable[[dlp.CreateDlpJobRequest], Awaitable[dlp.DlpJob]]: + r"""Return a callable for the create dlp job method over gRPC. + + Creates a new job to inspect storage or calculate + risk metrics. See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + When no InfoTypes or CustomInfoTypes are specified in + inspect jobs, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Returns: + Callable[[~.CreateDlpJobRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_dlp_job" not in self._stubs: + self._stubs["create_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateDlpJob", + request_serializer=dlp.CreateDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs["create_dlp_job"] + + @property + def list_dlp_jobs( + self, + ) -> Callable[[dlp.ListDlpJobsRequest], Awaitable[dlp.ListDlpJobsResponse]]: + r"""Return a callable for the list dlp jobs method over gRPC. + + Lists DlpJobs that match the specified filter in the + request. 
See + https://cloud.google.com/dlp/docs/inspecting-storage and + https://cloud.google.com/dlp/docs/compute-risk-analysis + to learn more. + + Returns: + Callable[[~.ListDlpJobsRequest], + Awaitable[~.ListDlpJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_dlp_jobs" not in self._stubs: + self._stubs["list_dlp_jobs"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListDlpJobs", + request_serializer=dlp.ListDlpJobsRequest.serialize, + response_deserializer=dlp.ListDlpJobsResponse.deserialize, + ) + return self._stubs["list_dlp_jobs"] + + @property + def get_dlp_job(self) -> Callable[[dlp.GetDlpJobRequest], Awaitable[dlp.DlpJob]]: + r"""Return a callable for the get dlp job method over gRPC. + + Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Returns: + Callable[[~.GetDlpJobRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dlp_job" not in self._stubs: + self._stubs["get_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetDlpJob", + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs["get_dlp_job"] + + @property + def delete_dlp_job( + self, + ) -> Callable[[dlp.DeleteDlpJobRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete dlp job method over gRPC. 
+ + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be cancelled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_dlp_job" not in self._stubs: + self._stubs["delete_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteDlpJob", + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_dlp_job"] + + @property + def cancel_dlp_job( + self, + ) -> Callable[[dlp.CancelDlpJobRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Returns: + Callable[[~.CancelDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_dlp_job" not in self._stubs: + self._stubs["cancel_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CancelDlpJob", + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["cancel_dlp_job"] + + @property + def create_stored_info_type( + self, + ) -> Callable[[dlp.CreateStoredInfoTypeRequest], Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_stored_info_type" not in self._stubs: + self._stubs["create_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/CreateStoredInfoType", + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs["create_stored_info_type"] + + @property + def update_stored_info_type( + self, + ) -> Callable[[dlp.UpdateStoredInfoTypeRequest], Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the update stored info type method over gRPC. + + Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_stored_info_type" not in self._stubs: + self._stubs["update_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType", + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs["update_stored_info_type"] + + @property + def get_stored_info_type( + self, + ) -> Callable[[dlp.GetStoredInfoTypeRequest], Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.GetStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_stored_info_type" not in self._stubs: + self._stubs["get_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/GetStoredInfoType", + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs["get_stored_info_type"] + + @property + def list_stored_info_types( + self, + ) -> Callable[ + [dlp.ListStoredInfoTypesRequest], Awaitable[dlp.ListStoredInfoTypesResponse] + ]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. 
+ + Returns: + Callable[[~.ListStoredInfoTypesRequest], + Awaitable[~.ListStoredInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_stored_info_types" not in self._stubs: + self._stubs["list_stored_info_types"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes", + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs["list_stored_info_types"] + + @property + def delete_stored_info_type( + self, + ) -> Callable[[dlp.DeleteStoredInfoTypeRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_stored_info_type" not in self._stubs: + self._stubs["delete_stored_info_type"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType", + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_stored_info_type"] + + @property + def hybrid_inspect_dlp_job( + self, + ) -> Callable[ + [dlp.HybridInspectDlpJobRequest], Awaitable[dlp.HybridInspectResponse] + ]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. 
+ + Inspect hybrid content and store findings to a job. + To review the findings inspect the job. Inspection will + occur asynchronously. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "hybrid_inspect_dlp_job" not in self._stubs: + self._stubs["hybrid_inspect_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob", + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs["hybrid_inspect_dlp_job"] + + @property + def finish_dlp_job( + self, + ) -> Callable[[dlp.FinishDlpJobRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. Early access feature is in a pre- + release state and might change or have limited support. + For more information, see + https://cloud.google.com/products#product-launch-stages. + + Returns: + Callable[[~.FinishDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "finish_dlp_job" not in self._stubs: + self._stubs["finish_dlp_job"] = self.grpc_channel.unary_unary( + "/google.privacy.dlp.v2.DlpService/FinishDlpJob", + request_serializer=dlp.FinishDlpJobRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["finish_dlp_job"] + + +__all__ = ("DlpServiceGrpcAsyncIOTransport",) diff --git a/google/cloud/dlp_v2/types.py b/google/cloud/dlp_v2/types.py deleted file mode 100644 index 222848dc..00000000 --- a/google/cloud/dlp_v2/types.py +++ /dev/null @@ -1,61 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from __future__ import absolute_import -import sys - -from google.api_core.protobuf_helpers import get_messages - -from google.cloud.dlp_v2.proto import dlp_pb2 -from google.cloud.dlp_v2.proto import storage_pb2 -from google.protobuf import any_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 -from google.protobuf import field_mask_pb2 -from google.protobuf import timestamp_pb2 -from google.rpc import status_pb2 -from google.type import date_pb2 -from google.type import timeofday_pb2 - - -_shared_modules = [ - any_pb2, - duration_pb2, - empty_pb2, - field_mask_pb2, - timestamp_pb2, - status_pb2, - date_pb2, - timeofday_pb2, -] - -_local_modules = [dlp_pb2, storage_pb2] - -names = [] - -for module in _shared_modules: # pragma: NO COVER - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.dlp_v2.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - - -__all__ = tuple(sorted(names)) diff --git a/google/cloud/dlp_v2/types/__init__.py b/google/cloud/dlp_v2/types/__init__.py new file mode 100644 index 00000000..32b59e7f --- /dev/null +++ b/google/cloud/dlp_v2/types/__init__.py @@ -0,0 +1,317 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .storage import ( + InfoType, + StoredType, + CustomInfoType, + FieldId, + PartitionId, + KindExpression, + DatastoreOptions, + CloudStorageRegexFileSet, + CloudStorageOptions, + CloudStorageFileSet, + CloudStoragePath, + BigQueryOptions, + StorageConfig, + HybridOptions, + BigQueryKey, + DatastoreKey, + Key, + RecordKey, + BigQueryTable, + BigQueryField, + EntityId, + TableOptions, +) +from .dlp import ( + ExcludeInfoTypes, + ExclusionRule, + InspectionRule, + InspectionRuleSet, + InspectConfig, + ByteContentItem, + ContentItem, + Table, + InspectResult, + Finding, + Location, + ContentLocation, + MetadataLocation, + StorageMetadataLabel, + DocumentLocation, + RecordLocation, + TableLocation, + Container, + Range, + ImageLocation, + BoundingBox, + RedactImageRequest, + Color, + RedactImageResponse, + DeidentifyContentRequest, + DeidentifyContentResponse, + ReidentifyContentRequest, + ReidentifyContentResponse, + InspectContentRequest, + InspectContentResponse, + OutputStorageConfig, + InfoTypeStats, + InspectDataSourceDetails, + HybridInspectStatistics, + InfoTypeDescription, + ListInfoTypesRequest, + ListInfoTypesResponse, + RiskAnalysisJobConfig, + QuasiId, + StatisticalTable, + PrivacyMetric, + AnalyzeDataSourceRiskDetails, + ValueFrequency, + Value, + QuoteInfo, + DateTime, + DeidentifyConfig, + TransformationErrorHandling, + PrimitiveTransformation, + TimePartConfig, + CryptoHashConfig, + CryptoDeterministicConfig, + ReplaceValueConfig, + ReplaceWithInfoTypeConfig, + RedactConfig, + CharsToIgnore, + CharacterMaskConfig, + FixedSizeBucketingConfig, + BucketingConfig, + CryptoReplaceFfxFpeConfig, + CryptoKey, + TransientCryptoKey, + UnwrappedCryptoKey, + KmsWrappedCryptoKey, + DateShiftConfig, + InfoTypeTransformations, + FieldTransformation, + RecordTransformations, + RecordSuppression, + RecordCondition, + TransformationOverview, + TransformationSummary, + Schedule, + Manual, + InspectTemplate, + DeidentifyTemplate, + Error, + JobTrigger, + Action, 
+ CreateInspectTemplateRequest, + UpdateInspectTemplateRequest, + GetInspectTemplateRequest, + ListInspectTemplatesRequest, + ListInspectTemplatesResponse, + DeleteInspectTemplateRequest, + CreateJobTriggerRequest, + ActivateJobTriggerRequest, + UpdateJobTriggerRequest, + GetJobTriggerRequest, + CreateDlpJobRequest, + ListJobTriggersRequest, + ListJobTriggersResponse, + DeleteJobTriggerRequest, + InspectJobConfig, + DlpJob, + GetDlpJobRequest, + ListDlpJobsRequest, + ListDlpJobsResponse, + CancelDlpJobRequest, + FinishDlpJobRequest, + DeleteDlpJobRequest, + CreateDeidentifyTemplateRequest, + UpdateDeidentifyTemplateRequest, + GetDeidentifyTemplateRequest, + ListDeidentifyTemplatesRequest, + ListDeidentifyTemplatesResponse, + DeleteDeidentifyTemplateRequest, + LargeCustomDictionaryConfig, + LargeCustomDictionaryStats, + StoredInfoTypeConfig, + StoredInfoTypeStats, + StoredInfoTypeVersion, + StoredInfoType, + CreateStoredInfoTypeRequest, + UpdateStoredInfoTypeRequest, + GetStoredInfoTypeRequest, + ListStoredInfoTypesRequest, + ListStoredInfoTypesResponse, + DeleteStoredInfoTypeRequest, + HybridInspectJobTriggerRequest, + HybridInspectDlpJobRequest, + HybridContentItem, + HybridFindingDetails, + HybridInspectResponse, +) + + +__all__ = ( + "InfoType", + "StoredType", + "CustomInfoType", + "FieldId", + "PartitionId", + "KindExpression", + "DatastoreOptions", + "CloudStorageRegexFileSet", + "CloudStorageOptions", + "CloudStorageFileSet", + "CloudStoragePath", + "BigQueryOptions", + "StorageConfig", + "HybridOptions", + "BigQueryKey", + "DatastoreKey", + "Key", + "RecordKey", + "BigQueryTable", + "BigQueryField", + "EntityId", + "TableOptions", + "ExcludeInfoTypes", + "ExclusionRule", + "InspectionRule", + "InspectionRuleSet", + "InspectConfig", + "ByteContentItem", + "ContentItem", + "Table", + "InspectResult", + "Finding", + "Location", + "ContentLocation", + "MetadataLocation", + "StorageMetadataLabel", + "DocumentLocation", + "RecordLocation", + "TableLocation", + 
"Container", + "Range", + "ImageLocation", + "BoundingBox", + "RedactImageRequest", + "Color", + "RedactImageResponse", + "DeidentifyContentRequest", + "DeidentifyContentResponse", + "ReidentifyContentRequest", + "ReidentifyContentResponse", + "InspectContentRequest", + "InspectContentResponse", + "OutputStorageConfig", + "InfoTypeStats", + "InspectDataSourceDetails", + "HybridInspectStatistics", + "InfoTypeDescription", + "ListInfoTypesRequest", + "ListInfoTypesResponse", + "RiskAnalysisJobConfig", + "QuasiId", + "StatisticalTable", + "PrivacyMetric", + "AnalyzeDataSourceRiskDetails", + "ValueFrequency", + "Value", + "QuoteInfo", + "DateTime", + "DeidentifyConfig", + "TransformationErrorHandling", + "PrimitiveTransformation", + "TimePartConfig", + "CryptoHashConfig", + "CryptoDeterministicConfig", + "ReplaceValueConfig", + "ReplaceWithInfoTypeConfig", + "RedactConfig", + "CharsToIgnore", + "CharacterMaskConfig", + "FixedSizeBucketingConfig", + "BucketingConfig", + "CryptoReplaceFfxFpeConfig", + "CryptoKey", + "TransientCryptoKey", + "UnwrappedCryptoKey", + "KmsWrappedCryptoKey", + "DateShiftConfig", + "InfoTypeTransformations", + "FieldTransformation", + "RecordTransformations", + "RecordSuppression", + "RecordCondition", + "TransformationOverview", + "TransformationSummary", + "Schedule", + "Manual", + "InspectTemplate", + "DeidentifyTemplate", + "Error", + "JobTrigger", + "Action", + "CreateInspectTemplateRequest", + "UpdateInspectTemplateRequest", + "GetInspectTemplateRequest", + "ListInspectTemplatesRequest", + "ListInspectTemplatesResponse", + "DeleteInspectTemplateRequest", + "CreateJobTriggerRequest", + "ActivateJobTriggerRequest", + "UpdateJobTriggerRequest", + "GetJobTriggerRequest", + "CreateDlpJobRequest", + "ListJobTriggersRequest", + "ListJobTriggersResponse", + "DeleteJobTriggerRequest", + "InspectJobConfig", + "DlpJob", + "GetDlpJobRequest", + "ListDlpJobsRequest", + "ListDlpJobsResponse", + "CancelDlpJobRequest", + "FinishDlpJobRequest", + 
"DeleteDlpJobRequest", + "CreateDeidentifyTemplateRequest", + "UpdateDeidentifyTemplateRequest", + "GetDeidentifyTemplateRequest", + "ListDeidentifyTemplatesRequest", + "ListDeidentifyTemplatesResponse", + "DeleteDeidentifyTemplateRequest", + "LargeCustomDictionaryConfig", + "LargeCustomDictionaryStats", + "StoredInfoTypeConfig", + "StoredInfoTypeStats", + "StoredInfoTypeVersion", + "StoredInfoType", + "CreateStoredInfoTypeRequest", + "UpdateStoredInfoTypeRequest", + "GetStoredInfoTypeRequest", + "ListStoredInfoTypesRequest", + "ListStoredInfoTypesResponse", + "DeleteStoredInfoTypeRequest", + "HybridInspectJobTriggerRequest", + "HybridInspectDlpJobRequest", + "HybridContentItem", + "HybridFindingDetails", + "HybridInspectResponse", +) diff --git a/google/cloud/dlp_v2/types/dlp.py b/google/cloud/dlp_v2/types/dlp.py new file mode 100644 index 00000000..521dd1e6 --- /dev/null +++ b/google/cloud/dlp_v2/types/dlp.py @@ -0,0 +1,5029 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import proto # type: ignore + + +from google.cloud.dlp_v2.types import storage +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import empty_pb2 as empty # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as gr_status # type: ignore +from google.type import date_pb2 as gt_date # type: ignore +from google.type import dayofweek_pb2 as dayofweek # type: ignore +from google.type import timeofday_pb2 as timeofday # type: ignore + + +__protobuf__ = proto.module( + package="google.privacy.dlp.v2", + manifest={ + "RelationalOperator", + "MatchingType", + "ContentOption", + "MetadataType", + "InfoTypeSupportedBy", + "DlpJobType", + "StoredInfoTypeState", + "ExcludeInfoTypes", + "ExclusionRule", + "InspectionRule", + "InspectionRuleSet", + "InspectConfig", + "ByteContentItem", + "ContentItem", + "Table", + "InspectResult", + "Finding", + "Location", + "ContentLocation", + "MetadataLocation", + "StorageMetadataLabel", + "DocumentLocation", + "RecordLocation", + "TableLocation", + "Container", + "Range", + "ImageLocation", + "BoundingBox", + "RedactImageRequest", + "Color", + "RedactImageResponse", + "DeidentifyContentRequest", + "DeidentifyContentResponse", + "ReidentifyContentRequest", + "ReidentifyContentResponse", + "InspectContentRequest", + "InspectContentResponse", + "OutputStorageConfig", + "InfoTypeStats", + "InspectDataSourceDetails", + "HybridInspectStatistics", + "InfoTypeDescription", + "ListInfoTypesRequest", + "ListInfoTypesResponse", + "RiskAnalysisJobConfig", + "QuasiId", + "StatisticalTable", + "PrivacyMetric", + "AnalyzeDataSourceRiskDetails", + "ValueFrequency", + "Value", + "QuoteInfo", + "DateTime", + "DeidentifyConfig", + "TransformationErrorHandling", + "PrimitiveTransformation", + "TimePartConfig", + "CryptoHashConfig", + "CryptoDeterministicConfig", + 
"ReplaceValueConfig", + "ReplaceWithInfoTypeConfig", + "RedactConfig", + "CharsToIgnore", + "CharacterMaskConfig", + "FixedSizeBucketingConfig", + "BucketingConfig", + "CryptoReplaceFfxFpeConfig", + "CryptoKey", + "TransientCryptoKey", + "UnwrappedCryptoKey", + "KmsWrappedCryptoKey", + "DateShiftConfig", + "InfoTypeTransformations", + "FieldTransformation", + "RecordTransformations", + "RecordSuppression", + "RecordCondition", + "TransformationOverview", + "TransformationSummary", + "Schedule", + "Manual", + "InspectTemplate", + "DeidentifyTemplate", + "Error", + "JobTrigger", + "Action", + "CreateInspectTemplateRequest", + "UpdateInspectTemplateRequest", + "GetInspectTemplateRequest", + "ListInspectTemplatesRequest", + "ListInspectTemplatesResponse", + "DeleteInspectTemplateRequest", + "CreateJobTriggerRequest", + "ActivateJobTriggerRequest", + "UpdateJobTriggerRequest", + "GetJobTriggerRequest", + "CreateDlpJobRequest", + "ListJobTriggersRequest", + "ListJobTriggersResponse", + "DeleteJobTriggerRequest", + "InspectJobConfig", + "DlpJob", + "GetDlpJobRequest", + "ListDlpJobsRequest", + "ListDlpJobsResponse", + "CancelDlpJobRequest", + "FinishDlpJobRequest", + "DeleteDlpJobRequest", + "CreateDeidentifyTemplateRequest", + "UpdateDeidentifyTemplateRequest", + "GetDeidentifyTemplateRequest", + "ListDeidentifyTemplatesRequest", + "ListDeidentifyTemplatesResponse", + "DeleteDeidentifyTemplateRequest", + "LargeCustomDictionaryConfig", + "LargeCustomDictionaryStats", + "StoredInfoTypeConfig", + "StoredInfoTypeStats", + "StoredInfoTypeVersion", + "StoredInfoType", + "CreateStoredInfoTypeRequest", + "UpdateStoredInfoTypeRequest", + "GetStoredInfoTypeRequest", + "ListStoredInfoTypesRequest", + "ListStoredInfoTypesResponse", + "DeleteStoredInfoTypeRequest", + "HybridInspectJobTriggerRequest", + "HybridInspectDlpJobRequest", + "HybridContentItem", + "HybridFindingDetails", + "HybridInspectResponse", + }, +) + + +class RelationalOperator(proto.Enum): + r"""Operators available 
for comparing the value of fields.""" + RELATIONAL_OPERATOR_UNSPECIFIED = 0 + EQUAL_TO = 1 + NOT_EQUAL_TO = 2 + GREATER_THAN = 3 + LESS_THAN = 4 + GREATER_THAN_OR_EQUALS = 5 + LESS_THAN_OR_EQUALS = 6 + EXISTS = 7 + + +class MatchingType(proto.Enum): + r"""Type of the match which can be applied to different ways of + matching, like Dictionary, regular expression and intersecting + with findings of another info type. + """ + MATCHING_TYPE_UNSPECIFIED = 0 + MATCHING_TYPE_FULL_MATCH = 1 + MATCHING_TYPE_PARTIAL_MATCH = 2 + MATCHING_TYPE_INVERSE_MATCH = 3 + + +class ContentOption(proto.Enum): + r"""Options describing which parts of the provided content should + be scanned. + """ + CONTENT_UNSPECIFIED = 0 + CONTENT_TEXT = 1 + CONTENT_IMAGE = 2 + + +class MetadataType(proto.Enum): + r"""Type of metadata containing the finding.""" + METADATATYPE_UNSPECIFIED = 0 + STORAGE_METADATA = 2 + + +class InfoTypeSupportedBy(proto.Enum): + r"""Parts of the APIs which use certain infoTypes.""" + ENUM_TYPE_UNSPECIFIED = 0 + INSPECT = 1 + RISK_ANALYSIS = 2 + + +class DlpJobType(proto.Enum): + r"""An enum to represent the various types of DLP jobs.""" + DLP_JOB_TYPE_UNSPECIFIED = 0 + INSPECT_JOB = 1 + RISK_ANALYSIS_JOB = 2 + + +class StoredInfoTypeState(proto.Enum): + r"""State of a StoredInfoType version.""" + STORED_INFO_TYPE_STATE_UNSPECIFIED = 0 + PENDING = 1 + READY = 2 + FAILED = 3 + INVALID = 4 + + +class ExcludeInfoTypes(proto.Message): + r"""List of exclude infoTypes. + + Attributes: + info_types (Sequence[~.storage.InfoType]): + InfoType list in ExclusionRule rule drops a finding when it + overlaps or contained within with a finding of an infoType + from this list. For example, for + ``InspectionRuleSet.info_types`` containing + "PHONE_NUMBER"``and``\ exclusion_rule\ ``containing``\ exclude_info_types.info_types\` + with "EMAIL_ADDRESS" the phone number findings are dropped + if they overlap with EMAIL_ADDRESS finding. 
That leads to + "555-222-2222@example.org" to generate only a single + finding, namely email address. + """ + + info_types = proto.RepeatedField(proto.MESSAGE, number=1, message=storage.InfoType,) + + +class ExclusionRule(proto.Message): + r"""The rule that specifies conditions when findings of infoTypes + specified in ``InspectionRuleSet`` are removed from results. + + Attributes: + dictionary (~.storage.CustomInfoType.Dictionary): + Dictionary which defines the rule. + regex (~.storage.CustomInfoType.Regex): + Regular expression which defines the rule. + exclude_info_types (~.dlp.ExcludeInfoTypes): + Set of infoTypes for which findings would + affect this rule. + matching_type (~.dlp.MatchingType): + How the rule is applied, see MatchingType + documentation for details. + """ + + dictionary = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message=storage.CustomInfoType.Dictionary, + ) + + regex = proto.Field( + proto.MESSAGE, number=2, oneof="type", message=storage.CustomInfoType.Regex, + ) + + exclude_info_types = proto.Field( + proto.MESSAGE, number=3, oneof="type", message=ExcludeInfoTypes, + ) + + matching_type = proto.Field(proto.ENUM, number=4, enum="MatchingType",) + + +class InspectionRule(proto.Message): + r"""A single inspection rule to be applied to infoTypes, specified in + ``InspectionRuleSet``. + + Attributes: + hotword_rule (~.storage.CustomInfoType.DetectionRule.HotwordRule): + Hotword-based detection rule. + exclusion_rule (~.dlp.ExclusionRule): + Exclusion rule. + """ + + hotword_rule = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message=storage.CustomInfoType.DetectionRule.HotwordRule, + ) + + exclusion_rule = proto.Field( + proto.MESSAGE, number=2, oneof="type", message=ExclusionRule, + ) + + +class InspectionRuleSet(proto.Message): + r"""Rule set for modifying a set of infoTypes to alter behavior + under certain circumstances, depending on the specific details + of the rules within the set. 
+ + Attributes: + info_types (Sequence[~.storage.InfoType]): + List of infoTypes this rule set is applied + to. + rules (Sequence[~.dlp.InspectionRule]): + Set of rules to be applied to infoTypes. The + rules are applied in order. + """ + + info_types = proto.RepeatedField(proto.MESSAGE, number=1, message=storage.InfoType,) + + rules = proto.RepeatedField(proto.MESSAGE, number=2, message=InspectionRule,) + + +class InspectConfig(proto.Message): + r"""Configuration description of the scanning process. When used with + redactContent only info_types and min_likelihood are currently used. + + Attributes: + info_types (Sequence[~.storage.InfoType]): + Restricts what info_types to look for. The values must + correspond to InfoType values returned by ListInfoTypes or + listed at + https://cloud.google.com/dlp/docs/infotypes-reference. + + When no InfoTypes or CustomInfoTypes are specified in a + request, the system may automatically choose what detectors + to run. By default this may be all types, but may change + over time as detectors are updated. + + If you need precise control and predictability as to what + detectors are run you should specify specific InfoTypes + listed in the reference, otherwise a default list will be + used, which may change over time. + min_likelihood (~.storage.Likelihood): + Only returns findings equal or above this + threshold. The default is POSSIBLE. + See https://cloud.google.com/dlp/docs/likelihood + to learn more. + limits (~.dlp.InspectConfig.FindingLimits): + Configuration to control the number of + findings returned. + include_quote (bool): + When true, a contextual quote from the data + that triggered a finding is included in the + response; see Finding.quote. + exclude_info_types (bool): + When true, excludes type information of the + findings. + custom_info_types (Sequence[~.storage.CustomInfoType]): + CustomInfoTypes provided by the user. See + https://cloud.google.com/dlp/docs/creating- + custom-infotypes to learn more. 
+ content_options (Sequence[~.dlp.ContentOption]): + List of options defining data content to + scan. If empty, text, images, and other content + will be included. + rule_set (Sequence[~.dlp.InspectionRuleSet]): + Set of rules to apply to the findings for + this InspectConfig. Exclusion rules, contained + in the set are executed in the end, other rules + are executed in the order they are specified for + each info type. + """ + + class FindingLimits(proto.Message): + r"""Configuration to control the number of findings returned. + + Attributes: + max_findings_per_item (int): + Max number of findings that will be returned for each item + scanned. When set within ``InspectJobConfig``, the maximum + returned is 2000 regardless if this is set higher. When set + within ``InspectContentRequest``, this field is ignored. + max_findings_per_request (int): + Max number of findings that will be returned per + request/job. When set within ``InspectContentRequest``, the + maximum returned is 2000 regardless if this is set higher. + max_findings_per_info_type (Sequence[~.dlp.InspectConfig.FindingLimits.InfoTypeLimit]): + Configuration of findings limit given for + specified infoTypes. + """ + + class InfoTypeLimit(proto.Message): + r"""Max findings configuration per infoType, per content item or + long running DlpJob. + + Attributes: + info_type (~.storage.InfoType): + Type of information the findings limit applies to. Only one + limit per info_type should be provided. If InfoTypeLimit + does not have an info_type, the DLP API applies the limit + against all info_types that are found but not specified in + another InfoTypeLimit. + max_findings (int): + Max findings limit for the given infoType. 
+ """ + + info_type = proto.Field(proto.MESSAGE, number=1, message=storage.InfoType,) + + max_findings = proto.Field(proto.INT32, number=2) + + max_findings_per_item = proto.Field(proto.INT32, number=1) + + max_findings_per_request = proto.Field(proto.INT32, number=2) + + max_findings_per_info_type = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="InspectConfig.FindingLimits.InfoTypeLimit", + ) + + info_types = proto.RepeatedField(proto.MESSAGE, number=1, message=storage.InfoType,) + + min_likelihood = proto.Field(proto.ENUM, number=2, enum=storage.Likelihood,) + + limits = proto.Field(proto.MESSAGE, number=3, message=FindingLimits,) + + include_quote = proto.Field(proto.BOOL, number=4) + + exclude_info_types = proto.Field(proto.BOOL, number=5) + + custom_info_types = proto.RepeatedField( + proto.MESSAGE, number=6, message=storage.CustomInfoType, + ) + + content_options = proto.RepeatedField(proto.ENUM, number=8, enum="ContentOption",) + + rule_set = proto.RepeatedField(proto.MESSAGE, number=10, message=InspectionRuleSet,) + + +class ByteContentItem(proto.Message): + r"""Container for bytes to inspect or redact. + + Attributes: + type (~.dlp.ByteContentItem.BytesType): + The type of data stored in the bytes string. Default will be + TEXT_UTF8. + data (bytes): + Content data to inspect or redact. + """ + + class BytesType(proto.Enum): + r"""The type of data being sent for inspection.""" + BYTES_TYPE_UNSPECIFIED = 0 + IMAGE = 6 + IMAGE_JPEG = 1 + IMAGE_BMP = 2 + IMAGE_PNG = 3 + IMAGE_SVG = 4 + TEXT_UTF8 = 5 + WORD_DOCUMENT = 7 + PDF = 8 + AVRO = 11 + CSV = 12 + TSV = 13 + + type = proto.Field(proto.ENUM, number=1, enum=BytesType,) + + data = proto.Field(proto.BYTES, number=2) + + +class ContentItem(proto.Message): + r"""Container structure for the content to inspect. + + Attributes: + value (str): + String data to inspect or redact. + table (~.dlp.Table): + Structured content for inspection. 
See + https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table + to learn more. + byte_item (~.dlp.ByteContentItem): + Content data to inspect or redact. Replaces ``type`` and + ``data``. + """ + + value = proto.Field(proto.STRING, number=3, oneof="data_item") + + table = proto.Field(proto.MESSAGE, number=4, oneof="data_item", message="Table",) + + byte_item = proto.Field( + proto.MESSAGE, number=5, oneof="data_item", message=ByteContentItem, + ) + + +class Table(proto.Message): + r"""Structured content to inspect. Up to 50,000 ``Value``\ s per request + allowed. See + https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table + to learn more. + + Attributes: + headers (Sequence[~.storage.FieldId]): + Headers of the table. + rows (Sequence[~.dlp.Table.Row]): + Rows of the table. + """ + + class Row(proto.Message): + r"""Values of the row. + + Attributes: + values (Sequence[~.dlp.Value]): + Individual cells. + """ + + values = proto.RepeatedField(proto.MESSAGE, number=1, message="Value",) + + headers = proto.RepeatedField(proto.MESSAGE, number=1, message=storage.FieldId,) + + rows = proto.RepeatedField(proto.MESSAGE, number=2, message=Row,) + + +class InspectResult(proto.Message): + r"""All the findings for a single scanned item. + + Attributes: + findings (Sequence[~.dlp.Finding]): + List of findings for an item. + findings_truncated (bool): + If true, then this item might have more + findings than were returned, and the findings + returned are an arbitrary subset of all + findings. The findings list might be truncated + because the input items were too large, or + because the server reached the maximum amount of + resources allowed for a single API call. For + best results, divide the input into smaller + batches. 
+ """ + + findings = proto.RepeatedField(proto.MESSAGE, number=1, message="Finding",) + + findings_truncated = proto.Field(proto.BOOL, number=2) + + +class Finding(proto.Message): + r"""Represents a piece of potentially sensitive content. + + Attributes: + name (str): + Resource name in format + projects/{project}/locations/{location}/findings/{finding} + Populated only when viewing persisted findings. + quote (str): + The content that was found. Even if the content is not + textual, it may be converted to a textual representation + here. Provided if ``include_quote`` is true and the finding + is less than or equal to 4096 bytes long. If the finding + exceeds 4096 bytes in length, the quote may be omitted. + info_type (~.storage.InfoType): + The type of content that might have been found. Provided if + ``excluded_types`` is false. + likelihood (~.storage.Likelihood): + Confidence of how likely it is that the ``info_type`` is + correct. + location (~.dlp.Location): + Where the content was found. + create_time (~.timestamp.Timestamp): + Timestamp when finding was detected. + quote_info (~.dlp.QuoteInfo): + Contains data parsed from quotes. Only populated if + include_quote was set to true and a supported infoType was + requested. Currently supported infoTypes: DATE, + DATE_OF_BIRTH and TIME. + resource_name (str): + The job that stored the finding. + trigger_name (str): + Job trigger name, if applicable, for this + finding. + labels (Sequence[~.dlp.Finding.LabelsEntry]): + The labels associated with this ``Finding``. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. 
+ + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + job_create_time (~.timestamp.Timestamp): + Time the job started that produced this + finding. + job_name (str): + The job that stored the finding. + """ + + name = proto.Field(proto.STRING, number=14) + + quote = proto.Field(proto.STRING, number=1) + + info_type = proto.Field(proto.MESSAGE, number=2, message=storage.InfoType,) + + likelihood = proto.Field(proto.ENUM, number=3, enum=storage.Likelihood,) + + location = proto.Field(proto.MESSAGE, number=4, message="Location",) + + create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + + quote_info = proto.Field(proto.MESSAGE, number=7, message="QuoteInfo",) + + resource_name = proto.Field(proto.STRING, number=8) + + trigger_name = proto.Field(proto.STRING, number=9) + + labels = proto.MapField(proto.STRING, proto.STRING, number=10) + + job_create_time = proto.Field( + proto.MESSAGE, number=11, message=timestamp.Timestamp, + ) + + job_name = proto.Field(proto.STRING, number=13) + + +class Location(proto.Message): + r"""Specifies the location of the finding. + + Attributes: + byte_range (~.dlp.Range): + Zero-based byte offsets delimiting the + finding. These are relative to the finding's + containing element. Note that when the content + is not textual, this references the UTF-8 + encoded textual representation of the content. + Omitted if content is an image. + codepoint_range (~.dlp.Range): + Unicode character offsets delimiting the + finding. These are relative to the finding's + containing element. Provided when the content is + text. + content_locations (Sequence[~.dlp.ContentLocation]): + List of nested objects pointing to the + precise location of the finding within the file + or record. + container (~.dlp.Container): + Information about the container where this + finding occurred, if available. 
+ """ + + byte_range = proto.Field(proto.MESSAGE, number=1, message="Range",) + + codepoint_range = proto.Field(proto.MESSAGE, number=2, message="Range",) + + content_locations = proto.RepeatedField( + proto.MESSAGE, number=7, message="ContentLocation", + ) + + container = proto.Field(proto.MESSAGE, number=8, message="Container",) + + +class ContentLocation(proto.Message): + r"""Precise location of the finding within a document, record, + image, or metadata container. + + Attributes: + container_name (str): + Name of the container where the finding is located. The top + level name is the source file name or table name. Names of + some common storage containers are formatted as follows: + + - BigQuery tables: ``{project_id}:{dataset_id}.{table_id}`` + - Cloud Storage files: ``gs://{bucket}/{path}`` + - Datastore namespace: {namespace} + + Nested names could be absent if the embedded object has no + string identifier (for an example an image contained within + a document). + record_location (~.dlp.RecordLocation): + Location within a row or record of a database + table. + image_location (~.dlp.ImageLocation): + Location within an image's pixels. + document_location (~.dlp.DocumentLocation): + Location data for document files. + metadata_location (~.dlp.MetadataLocation): + Location within the metadata for inspected + content. + container_timestamp (~.timestamp.Timestamp): + Findings container modification timestamp, if applicable. + For Google Cloud Storage contains last file modification + timestamp. For BigQuery table contains last_modified_time + property. For Datastore - not populated. + container_version (str): + Findings container version, if available + ("generation" for Google Cloud Storage). 
+ """ + + container_name = proto.Field(proto.STRING, number=1) + + record_location = proto.Field( + proto.MESSAGE, number=2, oneof="location", message="RecordLocation", + ) + + image_location = proto.Field( + proto.MESSAGE, number=3, oneof="location", message="ImageLocation", + ) + + document_location = proto.Field( + proto.MESSAGE, number=5, oneof="location", message="DocumentLocation", + ) + + metadata_location = proto.Field( + proto.MESSAGE, number=8, oneof="location", message="MetadataLocation", + ) + + container_timestamp = proto.Field( + proto.MESSAGE, number=6, message=timestamp.Timestamp, + ) + + container_version = proto.Field(proto.STRING, number=7) + + +class MetadataLocation(proto.Message): + r"""Metadata Location + + Attributes: + type (~.dlp.MetadataType): + Type of metadata containing the finding. + storage_label (~.dlp.StorageMetadataLabel): + Storage metadata. + """ + + type = proto.Field(proto.ENUM, number=1, enum="MetadataType",) + + storage_label = proto.Field( + proto.MESSAGE, number=3, oneof="label", message="StorageMetadataLabel", + ) + + +class StorageMetadataLabel(proto.Message): + r"""Storage metadata label to indicate which metadata entry + contains findings. + + Attributes: + key (str): + + """ + + key = proto.Field(proto.STRING, number=1) + + +class DocumentLocation(proto.Message): + r"""Location of a finding within a document. + + Attributes: + file_offset (int): + Offset of the line, from the beginning of the + file, where the finding is located. + """ + + file_offset = proto.Field(proto.INT64, number=1) + + +class RecordLocation(proto.Message): + r"""Location of a finding within a row or record. + + Attributes: + record_key (~.storage.RecordKey): + Key of the finding. + field_id (~.storage.FieldId): + Field id of the field containing the finding. + table_location (~.dlp.TableLocation): + Location within a ``ContentItem.Table``. 
+ """ + + record_key = proto.Field(proto.MESSAGE, number=1, message=storage.RecordKey,) + + field_id = proto.Field(proto.MESSAGE, number=2, message=storage.FieldId,) + + table_location = proto.Field(proto.MESSAGE, number=3, message="TableLocation",) + + +class TableLocation(proto.Message): + r"""Location of a finding within a table. + + Attributes: + row_index (int): + The zero-based index of the row where the finding is + located. Only populated for resources that have a natural + ordering, not BigQuery. In BigQuery, to identify the row a + finding came from, populate + BigQueryOptions.identifying_fields with your primary key + column names and when you store the findings the value of + those columns will be stored inside of Finding. + """ + + row_index = proto.Field(proto.INT64, number=1) + + +class Container(proto.Message): + r"""Represents a container that may contain DLP findings. + Examples of a container include a file, table, or database + record. + + Attributes: + type (str): + Container type, for example BigQuery or + Google Cloud Storage. + project_id (str): + Project where the finding was found. + Can be different from the project that owns the + finding. + full_path (str): + A string representation of the full container + name. Examples: + - BigQuery: 'Project:DataSetId.TableId' + - Google Cloud Storage: + 'gs://Bucket/folders/filename.txt' + root_path (str): + The root of the container. Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the root is ``dataset_id`` + - For Google Cloud Storage file + ``gs://bucket/folder/filename.txt``, the root is + ``gs://bucket`` + relative_path (str): + The rest of the path after the root. 
Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the relative path is ``table_id`` + - Google Cloud Storage file + ``gs://bucket/folder/filename.txt``, the relative path is + ``folder/filename.txt`` + update_time (~.timestamp.Timestamp): + Findings container modification timestamp, if applicable. + For Google Cloud Storage contains last file modification + timestamp. For BigQuery table contains last_modified_time + property. For Datastore - not populated. + version (str): + Findings container version, if available + ("generation" for Google Cloud Storage). + """ + + type = proto.Field(proto.STRING, number=1) + + project_id = proto.Field(proto.STRING, number=2) + + full_path = proto.Field(proto.STRING, number=3) + + root_path = proto.Field(proto.STRING, number=4) + + relative_path = proto.Field(proto.STRING, number=5) + + update_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + + version = proto.Field(proto.STRING, number=7) + + +class Range(proto.Message): + r"""Generic half-open interval [start, end) + + Attributes: + start (int): + Index of the first character of the range + (inclusive). + end (int): + Index of the last character of the range + (exclusive). + """ + + start = proto.Field(proto.INT64, number=1) + + end = proto.Field(proto.INT64, number=2) + + +class ImageLocation(proto.Message): + r"""Location of the finding within an image. + + Attributes: + bounding_boxes (Sequence[~.dlp.BoundingBox]): + Bounding boxes locating the pixels within the + image containing the finding. + """ + + bounding_boxes = proto.RepeatedField( + proto.MESSAGE, number=1, message="BoundingBox", + ) + + +class BoundingBox(proto.Message): + r"""Bounding box encompassing detected text within an image. + + Attributes: + top (int): + Top coordinate of the bounding box. (0,0) is + upper left. + left (int): + Left coordinate of the bounding box. (0,0) is + upper left. + width (int): + Width of the bounding box in pixels. 
+ height (int): + Height of the bounding box in pixels. + """ + + top = proto.Field(proto.INT32, number=1) + + left = proto.Field(proto.INT32, number=2) + + width = proto.Field(proto.INT32, number=3) + + height = proto.Field(proto.INT32, number=4) + + +class RedactImageRequest(proto.Message): + r"""Request to search for potentially sensitive info in an image + and redact it by covering it with a colored rectangle. + + Attributes: + parent (str): + The parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + location_id (str): + Deprecated. This field has no effect. + inspect_config (~.dlp.InspectConfig): + Configuration for the inspector. + image_redaction_configs (Sequence[~.dlp.RedactImageRequest.ImageRedactionConfig]): + The configuration for specifying what content + to redact from images. + include_findings (bool): + Whether the response should include findings + along with the redacted image. + byte_item (~.dlp.ByteContentItem): + The content must be PNG, JPEG, SVG or BMP. + """ + + class ImageRedactionConfig(proto.Message): + r"""Configuration for determining how redaction of images should + occur. + + Attributes: + info_type (~.storage.InfoType): + Only one per info_type should be provided per request. If + not specified, and redact_all_text is false, the DLP API + will redact all text that it matches against all info_types + that are found, but not specified in another + ImageRedactionConfig. + redact_all_text (bool): + If true, all text found in the image, regardless whether it + matches an info_type, is redacted. Only one should be + provided. + redaction_color (~.dlp.Color): + The color to use when redacting content from + an image. If not specified, the default is + black. 
+ """ + + info_type = proto.Field( + proto.MESSAGE, number=1, oneof="target", message=storage.InfoType, + ) + + redact_all_text = proto.Field(proto.BOOL, number=2, oneof="target") + + redaction_color = proto.Field(proto.MESSAGE, number=3, message="Color",) + + parent = proto.Field(proto.STRING, number=1) + + location_id = proto.Field(proto.STRING, number=8) + + inspect_config = proto.Field(proto.MESSAGE, number=2, message=InspectConfig,) + + image_redaction_configs = proto.RepeatedField( + proto.MESSAGE, number=5, message=ImageRedactionConfig, + ) + + include_findings = proto.Field(proto.BOOL, number=6) + + byte_item = proto.Field(proto.MESSAGE, number=7, message=ByteContentItem,) + + +class Color(proto.Message): + r"""Represents a color in the RGB color space. + + Attributes: + red (float): + The amount of red in the color as a value in the interval + [0, 1]. + green (float): + The amount of green in the color as a value in the interval + [0, 1]. + blue (float): + The amount of blue in the color as a value in the interval + [0, 1]. + """ + + red = proto.Field(proto.FLOAT, number=1) + + green = proto.Field(proto.FLOAT, number=2) + + blue = proto.Field(proto.FLOAT, number=3) + + +class RedactImageResponse(proto.Message): + r"""Results of redacting an image. + + Attributes: + redacted_image (bytes): + The redacted image. The type will be the same + as the original image. + extracted_text (str): + If an image was being inspected and the InspectConfig's + include_quote was set to true, then this field will include + all text, if any, that was found in the image. + inspect_result (~.dlp.InspectResult): + The findings. Populated when include_findings in the request + is true. + """ + + redacted_image = proto.Field(proto.BYTES, number=1) + + extracted_text = proto.Field(proto.STRING, number=2) + + inspect_result = proto.Field(proto.MESSAGE, number=3, message=InspectResult,) + + +class DeidentifyContentRequest(proto.Message): + r"""Request to de-identify a list of items. 
+ + Attributes: + parent (str): + Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + deidentify_config (~.dlp.DeidentifyConfig): + Configuration for the de-identification of the content item. + Items specified here will override the template referenced + by the deidentify_template_name argument. + inspect_config (~.dlp.InspectConfig): + Configuration for the inspector. Items specified here will + override the template referenced by the + inspect_template_name argument. + item (~.dlp.ContentItem): + The item to de-identify. Will be treated as + text. + inspect_template_name (str): + Template to use. Any configuration directly specified in + inspect_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + deidentify_template_name (str): + Template to use. Any configuration directly specified in + deidentify_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field(proto.STRING, number=1) + + deidentify_config = proto.Field( + proto.MESSAGE, number=2, message="DeidentifyConfig", + ) + + inspect_config = proto.Field(proto.MESSAGE, number=3, message=InspectConfig,) + + item = proto.Field(proto.MESSAGE, number=4, message=ContentItem,) + + inspect_template_name = proto.Field(proto.STRING, number=5) + + deidentify_template_name = proto.Field(proto.STRING, number=6) + + location_id = proto.Field(proto.STRING, number=7) + + +class DeidentifyContentResponse(proto.Message): + r"""Results of de-identifying a ContentItem. 
+ + Attributes: + item (~.dlp.ContentItem): + The de-identified item. + overview (~.dlp.TransformationOverview): + An overview of the changes that were made on the ``item``. + """ + + item = proto.Field(proto.MESSAGE, number=1, message=ContentItem,) + + overview = proto.Field(proto.MESSAGE, number=2, message="TransformationOverview",) + + +class ReidentifyContentRequest(proto.Message): + r"""Request to re-identify an item. + + Attributes: + parent (str): + Required. The parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + reidentify_config (~.dlp.DeidentifyConfig): + Configuration for the re-identification of the content item. + This field shares the same proto message type that is used + for de-identification, however its usage here is for the + reversal of the previous de-identification. + Re-identification is performed by examining the + transformations used to de-identify the items and executing + the reverse. This requires that only reversible + transformations be provided here. The reversible + transformations are: + + - ``CryptoDeterministicConfig`` + - ``CryptoReplaceFfxFpeConfig`` + inspect_config (~.dlp.InspectConfig): + Configuration for the inspector. + item (~.dlp.ContentItem): + The item to re-identify. Will be treated as + text. + inspect_template_name (str): + Template to use. Any configuration directly specified in + ``inspect_config`` will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + reidentify_template_name (str): + Template to use. References an instance of + ``DeidentifyTemplate``. Any configuration directly specified + in ``reidentify_config`` or ``inspect_config`` will override + those set in the template. 
Singular fields that are set in + this request will replace their corresponding fields in the + template. Repeated fields are appended. Singular + sub-messages and groups are recursively merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field(proto.STRING, number=1) + + reidentify_config = proto.Field( + proto.MESSAGE, number=2, message="DeidentifyConfig", + ) + + inspect_config = proto.Field(proto.MESSAGE, number=3, message=InspectConfig,) + + item = proto.Field(proto.MESSAGE, number=4, message=ContentItem,) + + inspect_template_name = proto.Field(proto.STRING, number=5) + + reidentify_template_name = proto.Field(proto.STRING, number=6) + + location_id = proto.Field(proto.STRING, number=7) + + +class ReidentifyContentResponse(proto.Message): + r"""Results of re-identifying a item. + + Attributes: + item (~.dlp.ContentItem): + The re-identified item. + overview (~.dlp.TransformationOverview): + An overview of the changes that were made to the ``item``. + """ + + item = proto.Field(proto.MESSAGE, number=1, message=ContentItem,) + + overview = proto.Field(proto.MESSAGE, number=2, message="TransformationOverview",) + + +class InspectContentRequest(proto.Message): + r"""Request to search for potentially sensitive info in a + ContentItem. + + Attributes: + parent (str): + Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + inspect_config (~.dlp.InspectConfig): + Configuration for the inspector. What specified here will + override the template referenced by the + inspect_template_name argument. + item (~.dlp.ContentItem): + The item to inspect. + inspect_template_name (str): + Template to use. Any configuration directly specified in + inspect_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. 
Singular sub-messages and groups are + recursively merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field(proto.STRING, number=1) + + inspect_config = proto.Field(proto.MESSAGE, number=2, message=InspectConfig,) + + item = proto.Field(proto.MESSAGE, number=3, message=ContentItem,) + + inspect_template_name = proto.Field(proto.STRING, number=4) + + location_id = proto.Field(proto.STRING, number=5) + + +class InspectContentResponse(proto.Message): + r"""Results of inspecting an item. + + Attributes: + result (~.dlp.InspectResult): + The findings. + """ + + result = proto.Field(proto.MESSAGE, number=1, message=InspectResult,) + + +class OutputStorageConfig(proto.Message): + r"""Cloud repository for storing output. + + Attributes: + table (~.storage.BigQueryTable): + Store findings in an existing table or a new table in an + existing dataset. If table_id is not set a new one will be + generated for you with the following format: + dlp_googleapis_yyyy_mm_dd_[dlp_job_id]. Pacific timezone + will be used for generating the date details. + + For Inspect, each column in an existing output table must + have the same name, type, and mode of a field in the + ``Finding`` object. + + For Risk, an existing output table should be the output of a + previous Risk analysis job run on the same source table, + with the same privacy metric and quasi-identifiers. Risk + jobs that analyze the same table but compute a different + privacy metric, or use different sets of quasi-identifiers, + cannot store their results in the same table. + output_schema (~.dlp.OutputStorageConfig.OutputSchema): + Schema used for writing the findings for Inspect jobs. This + field is only used for Inspect and must be unspecified for + Risk jobs. Columns are derived from the ``Finding`` object. + If appending to an existing table, any columns from the + predefined schema that are missing will be added. No columns + in the existing table will be deleted. 
+ + If unspecified, then all available columns will be used for + a new table or an (existing) table with no schema, and no + changes will be made to an existing table that has a schema. + Only for use with external storage. + """ + + class OutputSchema(proto.Enum): + r"""Predefined schemas for storing findings. + Only for use with external storage. + """ + OUTPUT_SCHEMA_UNSPECIFIED = 0 + BASIC_COLUMNS = 1 + GCS_COLUMNS = 2 + DATASTORE_COLUMNS = 3 + BIG_QUERY_COLUMNS = 4 + ALL_COLUMNS = 5 + + table = proto.Field( + proto.MESSAGE, number=1, oneof="type", message=storage.BigQueryTable, + ) + + output_schema = proto.Field(proto.ENUM, number=3, enum=OutputSchema,) + + +class InfoTypeStats(proto.Message): + r"""Statistics regarding a specific InfoType. + + Attributes: + info_type (~.storage.InfoType): + The type of finding this stat is for. + count (int): + Number of findings for this infoType. + """ + + info_type = proto.Field(proto.MESSAGE, number=1, message=storage.InfoType,) + + count = proto.Field(proto.INT64, number=2) + + +class InspectDataSourceDetails(proto.Message): + r"""The results of an inspect DataSource job. + + Attributes: + requested_options (~.dlp.InspectDataSourceDetails.RequestedOptions): + The configuration used for this job. + result (~.dlp.InspectDataSourceDetails.Result): + A summary of the outcome of this inspect job. + """ + + class RequestedOptions(proto.Message): + r"""Snapshot of the inspection configuration. + + Attributes: + snapshot_inspect_template (~.dlp.InspectTemplate): + If run with an InspectTemplate, a snapshot of + its state at the time of this run. + job_config (~.dlp.InspectJobConfig): + Inspect config. + """ + + snapshot_inspect_template = proto.Field( + proto.MESSAGE, number=1, message="InspectTemplate", + ) + + job_config = proto.Field(proto.MESSAGE, number=3, message="InspectJobConfig",) + + class Result(proto.Message): + r"""All result fields mentioned below are updated while the job + is processing. 
+ + Attributes: + processed_bytes (int): + Total size in bytes that were processed. + total_estimated_bytes (int): + Estimate of the number of bytes to process. + info_type_stats (Sequence[~.dlp.InfoTypeStats]): + Statistics of how many instances of each info + type were found during inspect job. + hybrid_stats (~.dlp.HybridInspectStatistics): + Statistics related to the processing of + hybrid inspect. Early access feature is in a + pre-release state and might change or have + limited support. For more information, see + https://cloud.google.com/products#product- + launch-stages. + """ + + processed_bytes = proto.Field(proto.INT64, number=1) + + total_estimated_bytes = proto.Field(proto.INT64, number=2) + + info_type_stats = proto.RepeatedField( + proto.MESSAGE, number=3, message=InfoTypeStats, + ) + + hybrid_stats = proto.Field( + proto.MESSAGE, number=7, message="HybridInspectStatistics", + ) + + requested_options = proto.Field(proto.MESSAGE, number=2, message=RequestedOptions,) + + result = proto.Field(proto.MESSAGE, number=3, message=Result,) + + +class HybridInspectStatistics(proto.Message): + r"""Statistics related to processing hybrid inspect requests. + + Attributes: + processed_count (int): + The number of hybrid inspection requests + processed within this job. + aborted_count (int): + The number of hybrid inspection requests + aborted because the job ran out of quota or was + ended before they could be processed. + pending_count (int): + The number of hybrid requests currently being processed. + Only populated when called via method ``getDlpJob``. A burst + of traffic may cause hybrid inspect requests to be enqueued. + Processing will take place as quickly as possible, but + resource limitations may impact how long a request is + enqueued for. 
+ """ + + processed_count = proto.Field(proto.INT64, number=1) + + aborted_count = proto.Field(proto.INT64, number=2) + + pending_count = proto.Field(proto.INT64, number=3) + + +class InfoTypeDescription(proto.Message): + r"""InfoType description. + + Attributes: + name (str): + Internal name of the infoType. + display_name (str): + Human readable form of the infoType name. + supported_by (Sequence[~.dlp.InfoTypeSupportedBy]): + Which parts of the API supports this + InfoType. + description (str): + Description of the infotype. Translated when + language is provided in the request. + """ + + name = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + supported_by = proto.RepeatedField( + proto.ENUM, number=3, enum="InfoTypeSupportedBy", + ) + + description = proto.Field(proto.STRING, number=4) + + +class ListInfoTypesRequest(proto.Message): + r"""Request for the list of infoTypes. + + Attributes: + parent (str): + The parent resource name. + + - Format:locations/[LOCATION-ID] + language_code (str): + BCP-47 language code for localized infoType + friendly names. If omitted, or if localized + strings are not available, en-US strings will be + returned. + filter (str): + filter to only return infoTypes supported by certain parts + of the API. Defaults to supported_by=INSPECT. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field(proto.STRING, number=4) + + language_code = proto.Field(proto.STRING, number=1) + + filter = proto.Field(proto.STRING, number=2) + + location_id = proto.Field(proto.STRING, number=3) + + +class ListInfoTypesResponse(proto.Message): + r"""Response to the ListInfoTypes request. + + Attributes: + info_types (Sequence[~.dlp.InfoTypeDescription]): + Set of sensitive infoTypes. 
+ """ + + info_types = proto.RepeatedField( + proto.MESSAGE, number=1, message=InfoTypeDescription, + ) + + +class RiskAnalysisJobConfig(proto.Message): + r"""Configuration for a risk analysis job. See + https://cloud.google.com/dlp/docs/concepts-risk-analysis to + learn more. + + Attributes: + privacy_metric (~.dlp.PrivacyMetric): + Privacy metric to compute. + source_table (~.storage.BigQueryTable): + Input dataset to compute metrics over. + actions (Sequence[~.dlp.Action]): + Actions to execute at the completion of the + job. Are executed in the order provided. + """ + + privacy_metric = proto.Field(proto.MESSAGE, number=1, message="PrivacyMetric",) + + source_table = proto.Field(proto.MESSAGE, number=2, message=storage.BigQueryTable,) + + actions = proto.RepeatedField(proto.MESSAGE, number=3, message="Action",) + + +class QuasiId(proto.Message): + r"""A column with a semantic tag attached. + + Attributes: + field (~.storage.FieldId): + Required. Identifies the column. + info_type (~.storage.InfoType): + A column can be tagged with a InfoType to use the relevant + public dataset as a statistical model of population, if + available. We currently support US ZIP codes, region codes, + ages and genders. To programmatically obtain the list of + supported InfoTypes, use ListInfoTypes with the + supported_by=RISK_ANALYSIS filter. + custom_tag (str): + A column can be tagged with a custom tag. In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). 
+ inferred (~.empty.Empty): + If no semantic tag is indicated, we infer the + statistical model from the distribution of + values in the input data + """ + + field = proto.Field(proto.MESSAGE, number=1, message=storage.FieldId,) + + info_type = proto.Field( + proto.MESSAGE, number=2, oneof="tag", message=storage.InfoType, + ) + + custom_tag = proto.Field(proto.STRING, number=3, oneof="tag") + + inferred = proto.Field(proto.MESSAGE, number=4, oneof="tag", message=empty.Empty,) + + +class StatisticalTable(proto.Message): + r"""An auxiliary table containing statistical information on the + relative frequency of different quasi-identifiers values. It has + one or several quasi-identifiers columns, and one column that + indicates the relative frequency of each quasi-identifier tuple. + If a tuple is present in the data but not in the auxiliary + table, the corresponding relative frequency is assumed to be + zero (and thus, the tuple is highly reidentifiable). + + Attributes: + table (~.storage.BigQueryTable): + Required. Auxiliary table location. + quasi_ids (Sequence[~.dlp.StatisticalTable.QuasiIdentifierField]): + Required. Quasi-identifier columns. + relative_frequency (~.storage.FieldId): + Required. The relative frequency column must + contain a floating-point number between 0 and 1 + (inclusive). Null values are assumed to be zero. + """ + + class QuasiIdentifierField(proto.Message): + r"""A quasi-identifier column has a custom_tag, used to know which + column in the data corresponds to which column in the statistical + model. + + Attributes: + field (~.storage.FieldId): + Identifies the column. + custom_tag (str): + A column can be tagged with a custom tag. In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). 
+ """ + + field = proto.Field(proto.MESSAGE, number=1, message=storage.FieldId,) + + custom_tag = proto.Field(proto.STRING, number=2) + + table = proto.Field(proto.MESSAGE, number=3, message=storage.BigQueryTable,) + + quasi_ids = proto.RepeatedField( + proto.MESSAGE, number=1, message=QuasiIdentifierField, + ) + + relative_frequency = proto.Field(proto.MESSAGE, number=2, message=storage.FieldId,) + + +class PrivacyMetric(proto.Message): + r"""Privacy metric to compute for reidentification risk analysis. + + Attributes: + numerical_stats_config (~.dlp.PrivacyMetric.NumericalStatsConfig): + Numerical stats + categorical_stats_config (~.dlp.PrivacyMetric.CategoricalStatsConfig): + Categorical stats + k_anonymity_config (~.dlp.PrivacyMetric.KAnonymityConfig): + K-anonymity + l_diversity_config (~.dlp.PrivacyMetric.LDiversityConfig): + l-diversity + k_map_estimation_config (~.dlp.PrivacyMetric.KMapEstimationConfig): + k-map + delta_presence_estimation_config (~.dlp.PrivacyMetric.DeltaPresenceEstimationConfig): + delta-presence + """ + + class NumericalStatsConfig(proto.Message): + r"""Compute numerical stats over an individual column, including + min, max, and quantiles. + + Attributes: + field (~.storage.FieldId): + Field to compute numerical stats on. + Supported types are integer, float, date, + datetime, timestamp, time. + """ + + field = proto.Field(proto.MESSAGE, number=1, message=storage.FieldId,) + + class CategoricalStatsConfig(proto.Message): + r"""Compute numerical stats over an individual column, including + number of distinct values and value count distribution. + + Attributes: + field (~.storage.FieldId): + Field to compute categorical stats on. All + column types are supported except for arrays and + structs. However, it may be more informative to + use NumericalStats when the field type is + supported, depending on the data. 
+ """ + + field = proto.Field(proto.MESSAGE, number=1, message=storage.FieldId,) + + class KAnonymityConfig(proto.Message): + r"""k-anonymity metric, used for analysis of reidentification + risk. + + Attributes: + quasi_ids (Sequence[~.storage.FieldId]): + Set of fields to compute k-anonymity over. + When multiple fields are specified, they are + considered a single composite key. Structs and + repeated data types are not supported; however, + nested fields are supported so long as they are + not structs themselves or nested within a + repeated field. + entity_id (~.storage.EntityId): + Message indicating that multiple rows might be associated to + a single individual. If the same entity_id is associated to + multiple quasi-identifier tuples over distinct rows, we + consider the entire collection of tuples as the composite + quasi-identifier. This collection is a multiset: the order + in which the different tuples appear in the dataset is + ignored, but their frequency is taken into account. + + Important note: a maximum of 1000 rows can be associated to + a single entity ID. If more rows are associated with the + same entity ID, some might be ignored. + """ + + quasi_ids = proto.RepeatedField( + proto.MESSAGE, number=1, message=storage.FieldId, + ) + + entity_id = proto.Field(proto.MESSAGE, number=2, message=storage.EntityId,) + + class LDiversityConfig(proto.Message): + r"""l-diversity metric, used for analysis of reidentification + risk. + + Attributes: + quasi_ids (Sequence[~.storage.FieldId]): + Set of quasi-identifiers indicating how + equivalence classes are defined for the + l-diversity computation. When multiple fields + are specified, they are considered a single + composite key. + sensitive_attribute (~.storage.FieldId): + Sensitive field for computing the l-value. 
+ """ + + quasi_ids = proto.RepeatedField( + proto.MESSAGE, number=1, message=storage.FieldId, + ) + + sensitive_attribute = proto.Field( + proto.MESSAGE, number=2, message=storage.FieldId, + ) + + class KMapEstimationConfig(proto.Message): + r"""Reidentifiability metric. This corresponds to a risk model + similar to what is called "journalist risk" in the literature, + except the attack dataset is statistically modeled instead of + being perfectly known. This can be done using publicly available + data (like the US Census), or using a custom statistical model + (indicated as one or several BigQuery tables), or by + extrapolating from the distribution of values in the input + dataset. + + Attributes: + quasi_ids (Sequence[~.dlp.PrivacyMetric.KMapEstimationConfig.TaggedField]): + Required. Fields considered to be quasi- + dentifiers. No two columns can have the same + tag. + region_code (str): + ISO 3166-1 alpha-2 region code to use in the statistical + modeling. Set if no column is tagged with a region-specific + InfoType (like US_ZIP_5) or a region code. + auxiliary_tables (Sequence[~.dlp.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable]): + Several auxiliary tables can be used in the analysis. Each + custom_tag used to tag a quasi-identifiers column must + appear in exactly one column of one auxiliary table. + """ + + class TaggedField(proto.Message): + r"""A column with a semantic tag attached. + + Attributes: + field (~.storage.FieldId): + Required. Identifies the column. + info_type (~.storage.InfoType): + A column can be tagged with a InfoType to use the relevant + public dataset as a statistical model of population, if + available. We currently support US ZIP codes, region codes, + ages and genders. To programmatically obtain the list of + supported InfoTypes, use ListInfoTypes with the + supported_by=RISK_ANALYSIS filter. + custom_tag (str): + A column can be tagged with a custom tag. 
In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + inferred (~.empty.Empty): + If no semantic tag is indicated, we infer the + statistical model from the distribution of + values in the input data + """ + + field = proto.Field(proto.MESSAGE, number=1, message=storage.FieldId,) + + info_type = proto.Field( + proto.MESSAGE, number=2, oneof="tag", message=storage.InfoType, + ) + + custom_tag = proto.Field(proto.STRING, number=3, oneof="tag") + + inferred = proto.Field( + proto.MESSAGE, number=4, oneof="tag", message=empty.Empty, + ) + + class AuxiliaryTable(proto.Message): + r"""An auxiliary table contains statistical information on the + relative frequency of different quasi-identifiers values. It has + one or several quasi-identifiers columns, and one column that + indicates the relative frequency of each quasi-identifier tuple. + If a tuple is present in the data but not in the auxiliary + table, the corresponding relative frequency is assumed to be + zero (and thus, the tuple is highly reidentifiable). + + Attributes: + table (~.storage.BigQueryTable): + Required. Auxiliary table location. + quasi_ids (Sequence[~.dlp.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField]): + Required. Quasi-identifier columns. + relative_frequency (~.storage.FieldId): + Required. The relative frequency column must + contain a floating-point number between 0 and 1 + (inclusive). Null values are assumed to be zero. + """ + + class QuasiIdField(proto.Message): + r"""A quasi-identifier column has a custom_tag, used to know which + column in the data corresponds to which column in the statistical + model. + + Attributes: + field (~.storage.FieldId): + Identifies the column. + custom_tag (str): + A auxiliary field. 
+ """ + + field = proto.Field(proto.MESSAGE, number=1, message=storage.FieldId,) + + custom_tag = proto.Field(proto.STRING, number=2) + + table = proto.Field(proto.MESSAGE, number=3, message=storage.BigQueryTable,) + + quasi_ids = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField", + ) + + relative_frequency = proto.Field( + proto.MESSAGE, number=2, message=storage.FieldId, + ) + + quasi_ids = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="PrivacyMetric.KMapEstimationConfig.TaggedField", + ) + + region_code = proto.Field(proto.STRING, number=2) + + auxiliary_tables = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="PrivacyMetric.KMapEstimationConfig.AuxiliaryTable", + ) + + class DeltaPresenceEstimationConfig(proto.Message): + r"""δ-presence metric, used to estimate how likely it is for an + attacker to figure out that one given individual appears in a + de-identified dataset. Similarly to the k-map metric, we cannot + compute δ-presence exactly without knowing the attack dataset, + so we use a statistical model instead. + + Attributes: + quasi_ids (Sequence[~.dlp.QuasiId]): + Required. Fields considered to be quasi- + dentifiers. No two fields can have the same tag. + region_code (str): + ISO 3166-1 alpha-2 region code to use in the statistical + modeling. Set if no column is tagged with a region-specific + InfoType (like US_ZIP_5) or a region code. + auxiliary_tables (Sequence[~.dlp.StatisticalTable]): + Several auxiliary tables can be used in the analysis. Each + custom_tag used to tag a quasi-identifiers field must appear + in exactly one field of one auxiliary table. 
+ """ + + quasi_ids = proto.RepeatedField(proto.MESSAGE, number=1, message=QuasiId,) + + region_code = proto.Field(proto.STRING, number=2) + + auxiliary_tables = proto.RepeatedField( + proto.MESSAGE, number=3, message=StatisticalTable, + ) + + numerical_stats_config = proto.Field( + proto.MESSAGE, number=1, oneof="type", message=NumericalStatsConfig, + ) + + categorical_stats_config = proto.Field( + proto.MESSAGE, number=2, oneof="type", message=CategoricalStatsConfig, + ) + + k_anonymity_config = proto.Field( + proto.MESSAGE, number=3, oneof="type", message=KAnonymityConfig, + ) + + l_diversity_config = proto.Field( + proto.MESSAGE, number=4, oneof="type", message=LDiversityConfig, + ) + + k_map_estimation_config = proto.Field( + proto.MESSAGE, number=5, oneof="type", message=KMapEstimationConfig, + ) + + delta_presence_estimation_config = proto.Field( + proto.MESSAGE, number=6, oneof="type", message=DeltaPresenceEstimationConfig, + ) + + +class AnalyzeDataSourceRiskDetails(proto.Message): + r"""Result of a risk analysis operation request. + + Attributes: + requested_privacy_metric (~.dlp.PrivacyMetric): + Privacy metric to compute. + requested_source_table (~.storage.BigQueryTable): + Input dataset to compute metrics over. + numerical_stats_result (~.dlp.AnalyzeDataSourceRiskDetails.NumericalStatsResult): + Numerical stats result + categorical_stats_result (~.dlp.AnalyzeDataSourceRiskDetails.CategoricalStatsResult): + Categorical stats result + k_anonymity_result (~.dlp.AnalyzeDataSourceRiskDetails.KAnonymityResult): + K-anonymity result + l_diversity_result (~.dlp.AnalyzeDataSourceRiskDetails.LDiversityResult): + L-divesity result + k_map_estimation_result (~.dlp.AnalyzeDataSourceRiskDetails.KMapEstimationResult): + K-map result + delta_presence_estimation_result (~.dlp.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult): + Delta-presence result + """ + + class NumericalStatsResult(proto.Message): + r"""Result of the numerical stats computation. 
+ + Attributes: + min_value (~.dlp.Value): + Minimum value appearing in the column. + max_value (~.dlp.Value): + Maximum value appearing in the column. + quantile_values (Sequence[~.dlp.Value]): + List of 99 values that partition the set of + field values into 100 equal sized buckets. + """ + + min_value = proto.Field(proto.MESSAGE, number=1, message="Value",) + + max_value = proto.Field(proto.MESSAGE, number=2, message="Value",) + + quantile_values = proto.RepeatedField(proto.MESSAGE, number=4, message="Value",) + + class CategoricalStatsResult(proto.Message): + r"""Result of the categorical stats computation. + + Attributes: + value_frequency_histogram_buckets (Sequence[~.dlp.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket]): + Histogram of value frequencies in the column. + """ + + class CategoricalStatsHistogramBucket(proto.Message): + r"""Histogram of value frequencies in the column. + + Attributes: + value_frequency_lower_bound (int): + Lower bound on the value frequency of the + values in this bucket. + value_frequency_upper_bound (int): + Upper bound on the value frequency of the + values in this bucket. + bucket_size (int): + Total number of values in this bucket. + bucket_values (Sequence[~.dlp.ValueFrequency]): + Sample of value frequencies in this bucket. + The total number of values returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct values in this + bucket. 
+ """ + + value_frequency_lower_bound = proto.Field(proto.INT64, number=1) + + value_frequency_upper_bound = proto.Field(proto.INT64, number=2) + + bucket_size = proto.Field(proto.INT64, number=3) + + bucket_values = proto.RepeatedField( + proto.MESSAGE, number=4, message="ValueFrequency", + ) + + bucket_value_count = proto.Field(proto.INT64, number=5) + + value_frequency_histogram_buckets = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket", + ) + + class KAnonymityResult(proto.Message): + r"""Result of the k-anonymity computation. + + Attributes: + equivalence_class_histogram_buckets (Sequence[~.dlp.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket]): + Histogram of k-anonymity equivalence classes. + """ + + class KAnonymityEquivalenceClass(proto.Message): + r"""The set of columns' values that share the same ldiversity + value + + Attributes: + quasi_ids_values (Sequence[~.dlp.Value]): + Set of values defining the equivalence class. + One value per quasi-identifier column in the + original KAnonymity metric message. The order is + always the same as the original request. + equivalence_class_size (int): + Size of the equivalence class, for example + number of rows with the above set of values. + """ + + quasi_ids_values = proto.RepeatedField( + proto.MESSAGE, number=1, message="Value", + ) + + equivalence_class_size = proto.Field(proto.INT64, number=2) + + class KAnonymityHistogramBucket(proto.Message): + r"""Histogram of k-anonymity equivalence classes. + + Attributes: + equivalence_class_size_lower_bound (int): + Lower bound on the size of the equivalence + classes in this bucket. + equivalence_class_size_upper_bound (int): + Upper bound on the size of the equivalence + classes in this bucket. + bucket_size (int): + Total number of equivalence classes in this + bucket. 
+ bucket_values (Sequence[~.dlp.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass]): + Sample of equivalence classes in this bucket. + The total number of classes returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct equivalence classes + in this bucket. + """ + + equivalence_class_size_lower_bound = proto.Field(proto.INT64, number=1) + + equivalence_class_size_upper_bound = proto.Field(proto.INT64, number=2) + + bucket_size = proto.Field(proto.INT64, number=3) + + bucket_values = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass", + ) + + bucket_value_count = proto.Field(proto.INT64, number=5) + + equivalence_class_histogram_buckets = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket", + ) + + class LDiversityResult(proto.Message): + r"""Result of the l-diversity computation. + + Attributes: + sensitive_value_frequency_histogram_buckets (Sequence[~.dlp.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket]): + Histogram of l-diversity equivalence class + sensitive value frequencies. + """ + + class LDiversityEquivalenceClass(proto.Message): + r"""The set of columns' values that share the same ldiversity + value. + + Attributes: + quasi_ids_values (Sequence[~.dlp.Value]): + Quasi-identifier values defining the + k-anonymity equivalence class. The order is + always the same as the original request. + equivalence_class_size (int): + Size of the k-anonymity equivalence class. + num_distinct_sensitive_values (int): + Number of distinct sensitive values in this + equivalence class. + top_sensitive_values (Sequence[~.dlp.ValueFrequency]): + Estimated frequencies of top sensitive + values. 
+ """ + + quasi_ids_values = proto.RepeatedField( + proto.MESSAGE, number=1, message="Value", + ) + + equivalence_class_size = proto.Field(proto.INT64, number=2) + + num_distinct_sensitive_values = proto.Field(proto.INT64, number=3) + + top_sensitive_values = proto.RepeatedField( + proto.MESSAGE, number=4, message="ValueFrequency", + ) + + class LDiversityHistogramBucket(proto.Message): + r"""Histogram of l-diversity equivalence class sensitive value + frequencies. + + Attributes: + sensitive_value_frequency_lower_bound (int): + Lower bound on the sensitive value + frequencies of the equivalence classes in this + bucket. + sensitive_value_frequency_upper_bound (int): + Upper bound on the sensitive value + frequencies of the equivalence classes in this + bucket. + bucket_size (int): + Total number of equivalence classes in this + bucket. + bucket_values (Sequence[~.dlp.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass]): + Sample of equivalence classes in this bucket. + The total number of classes returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct equivalence classes + in this bucket. + """ + + sensitive_value_frequency_lower_bound = proto.Field(proto.INT64, number=1) + + sensitive_value_frequency_upper_bound = proto.Field(proto.INT64, number=2) + + bucket_size = proto.Field(proto.INT64, number=3) + + bucket_values = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass", + ) + + bucket_value_count = proto.Field(proto.INT64, number=5) + + sensitive_value_frequency_histogram_buckets = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket", + ) + + class KMapEstimationResult(proto.Message): + r"""Result of the reidentifiability analysis. Note that these + results are an estimation, not exact values. 
+
+        Attributes:
+            k_map_estimation_histogram (Sequence[~.dlp.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket]):
+                The intervals [min_anonymity, max_anonymity] do not overlap.
+                If a value doesn't correspond to any such interval, the
+                associated frequency is zero. For example, the following
+                records: {min_anonymity: 1, max_anonymity: 1, frequency: 17}
+                {min_anonymity: 2, max_anonymity: 3, frequency: 42}
+                {min_anonymity: 5, max_anonymity: 10, frequency: 99} mean
+                that there are no records with an estimated anonymity of 4,
+                5, or larger than 10.
+        """
+
+        class KMapEstimationQuasiIdValues(proto.Message):
+            r"""A tuple of values for the quasi-identifier columns.
+
+            Attributes:
+                quasi_ids_values (Sequence[~.dlp.Value]):
+                    The quasi-identifier values.
+                estimated_anonymity (int):
+                    The estimated anonymity for these quasi-
+                    identifier values.
+            """
+
+            quasi_ids_values = proto.RepeatedField(
+                proto.MESSAGE, number=1, message="Value",
+            )
+
+            estimated_anonymity = proto.Field(proto.INT64, number=2)
+
+        class KMapEstimationHistogramBucket(proto.Message):
+            r"""A KMapEstimationHistogramBucket message with the following values:
+            min_anonymity: 3 max_anonymity: 5 frequency: 42 means that there are
+            42 records whose quasi-identifier values correspond to 3, 4 or 5
+            people in the overlying population. An important particular case is
+            when min_anonymity = max_anonymity = 1: the frequency field then
+            corresponds to the number of uniquely identifiable records.
+
+            Attributes:
+                min_anonymity (int):
+                    Always positive.
+                max_anonymity (int):
+                    Always greater than or equal to min_anonymity.
+                bucket_size (int):
+                    Number of records within these anonymity
+                    bounds.
+                bucket_values (Sequence[~.dlp.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues]):
+                    Sample of quasi-identifier tuple values in
+                    this bucket. The total number of classes
+                    returned per bucket is capped at 20.
+ bucket_value_count (int): + Total number of distinct quasi-identifier + tuple values in this bucket. + """ + + min_anonymity = proto.Field(proto.INT64, number=1) + + max_anonymity = proto.Field(proto.INT64, number=2) + + bucket_size = proto.Field(proto.INT64, number=5) + + bucket_values = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues", + ) + + bucket_value_count = proto.Field(proto.INT64, number=7) + + k_map_estimation_histogram = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket", + ) + + class DeltaPresenceEstimationResult(proto.Message): + r"""Result of the δ-presence computation. Note that these results + are an estimation, not exact values. + + Attributes: + delta_presence_estimation_histogram (Sequence[~.dlp.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket]): + The intervals [min_probability, max_probability) do not + overlap. If a value doesn't correspond to any such interval, + the associated frequency is zero. For example, the following + records: {min_probability: 0, max_probability: 0.1, + frequency: 17} {min_probability: 0.2, max_probability: 0.3, + frequency: 42} {min_probability: 0.3, max_probability: 0.4, + frequency: 99} mean that there are no record with an + estimated probability in [0.1, 0.2) nor larger or equal to + 0.4. + """ + + class DeltaPresenceEstimationQuasiIdValues(proto.Message): + r"""A tuple of values for the quasi-identifier columns. + + Attributes: + quasi_ids_values (Sequence[~.dlp.Value]): + The quasi-identifier values. + estimated_probability (float): + The estimated probability that a given individual sharing + these quasi-identifier values is in the dataset. 
This value, + typically called δ, is the ratio between the number of + records in the dataset with these quasi-identifier values, + and the total number of individuals (inside *and* outside + the dataset) with these quasi-identifier values. For + example, if there are 15 individuals in the dataset who + share the same quasi-identifier values, and an estimated 100 + people in the entire population with these values, then δ is + 0.15. + """ + + quasi_ids_values = proto.RepeatedField( + proto.MESSAGE, number=1, message="Value", + ) + + estimated_probability = proto.Field(proto.DOUBLE, number=2) + + class DeltaPresenceEstimationHistogramBucket(proto.Message): + r"""A DeltaPresenceEstimationHistogramBucket message with the following + values: min_probability: 0.1 max_probability: 0.2 frequency: 42 + means that there are 42 records for which δ is in [0.1, 0.2). An + important particular case is when min_probability = max_probability + = 1: then, every individual who shares this quasi-identifier + combination is in the dataset. + + Attributes: + min_probability (float): + Between 0 and 1. + max_probability (float): + Always greater than or equal to min_probability. + bucket_size (int): + Number of records within these probability + bounds. + bucket_values (Sequence[~.dlp.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues]): + Sample of quasi-identifier tuple values in + this bucket. The total number of classes + returned per bucket is capped at 20. + bucket_value_count (int): + Total number of distinct quasi-identifier + tuple values in this bucket. 
+ """ + + min_probability = proto.Field(proto.DOUBLE, number=1) + + max_probability = proto.Field(proto.DOUBLE, number=2) + + bucket_size = proto.Field(proto.INT64, number=5) + + bucket_values = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues", + ) + + bucket_value_count = proto.Field(proto.INT64, number=7) + + delta_presence_estimation_histogram = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket", + ) + + requested_privacy_metric = proto.Field( + proto.MESSAGE, number=1, message=PrivacyMetric, + ) + + requested_source_table = proto.Field( + proto.MESSAGE, number=2, message=storage.BigQueryTable, + ) + + numerical_stats_result = proto.Field( + proto.MESSAGE, number=3, oneof="result", message=NumericalStatsResult, + ) + + categorical_stats_result = proto.Field( + proto.MESSAGE, number=4, oneof="result", message=CategoricalStatsResult, + ) + + k_anonymity_result = proto.Field( + proto.MESSAGE, number=5, oneof="result", message=KAnonymityResult, + ) + + l_diversity_result = proto.Field( + proto.MESSAGE, number=6, oneof="result", message=LDiversityResult, + ) + + k_map_estimation_result = proto.Field( + proto.MESSAGE, number=7, oneof="result", message=KMapEstimationResult, + ) + + delta_presence_estimation_result = proto.Field( + proto.MESSAGE, number=9, oneof="result", message=DeltaPresenceEstimationResult, + ) + + +class ValueFrequency(proto.Message): + r"""A value of a field, including its frequency. + + Attributes: + value (~.dlp.Value): + A value contained in the field in question. + count (int): + How many times the value is contained in the + field. 
+ """ + + value = proto.Field(proto.MESSAGE, number=1, message="Value",) + + count = proto.Field(proto.INT64, number=2) + + +class Value(proto.Message): + r"""Set of primitive values supported by the system. Note that for the + purposes of inspection or transformation, the number of bytes + considered to comprise a 'Value' is based on its representation as a + UTF-8 encoded string. For example, if 'integer_value' is set to + 123456789, the number of bytes would be counted as 9, even though an + int64 only holds up to 8 bytes of data. + + Attributes: + integer_value (int): + integer + float_value (float): + float + string_value (str): + string + boolean_value (bool): + boolean + timestamp_value (~.timestamp.Timestamp): + timestamp + time_value (~.timeofday.TimeOfDay): + time of day + date_value (~.gt_date.Date): + date + day_of_week_value (~.dayofweek.DayOfWeek): + day of week + """ + + integer_value = proto.Field(proto.INT64, number=1, oneof="type") + + float_value = proto.Field(proto.DOUBLE, number=2, oneof="type") + + string_value = proto.Field(proto.STRING, number=3, oneof="type") + + boolean_value = proto.Field(proto.BOOL, number=4, oneof="type") + + timestamp_value = proto.Field( + proto.MESSAGE, number=5, oneof="type", message=timestamp.Timestamp, + ) + + time_value = proto.Field( + proto.MESSAGE, number=6, oneof="type", message=timeofday.TimeOfDay, + ) + + date_value = proto.Field( + proto.MESSAGE, number=7, oneof="type", message=gt_date.Date, + ) + + day_of_week_value = proto.Field( + proto.ENUM, number=8, oneof="type", enum=dayofweek.DayOfWeek, + ) + + +class QuoteInfo(proto.Message): + r"""Message for infoType-dependent details parsed from quote. + + Attributes: + date_time (~.dlp.DateTime): + The date time indicated by the quote. + """ + + date_time = proto.Field( + proto.MESSAGE, number=2, oneof="parsed_quote", message="DateTime", + ) + + +class DateTime(proto.Message): + r"""Message for a date time object. + e.g. 2018-01-01, 5th August. 
+ + Attributes: + date (~.gt_date.Date): + One or more of the following must be set. + Must be a valid date or time value. + day_of_week (~.dayofweek.DayOfWeek): + Day of week + time (~.timeofday.TimeOfDay): + Time of day + time_zone (~.dlp.DateTime.TimeZone): + Time zone + """ + + class TimeZone(proto.Message): + r"""Time zone of the date time object. + + Attributes: + offset_minutes (int): + Set only if the offset can be determined. + Positive for time ahead of UTC. E.g. For + "UTC-9", this value is -540. + """ + + offset_minutes = proto.Field(proto.INT32, number=1) + + date = proto.Field(proto.MESSAGE, number=1, message=gt_date.Date,) + + day_of_week = proto.Field(proto.ENUM, number=2, enum=dayofweek.DayOfWeek,) + + time = proto.Field(proto.MESSAGE, number=3, message=timeofday.TimeOfDay,) + + time_zone = proto.Field(proto.MESSAGE, number=4, message=TimeZone,) + + +class DeidentifyConfig(proto.Message): + r"""The configuration that controls how the data will change. + + Attributes: + info_type_transformations (~.dlp.InfoTypeTransformations): + Treat the dataset as free-form text and apply + the same free text transformation everywhere. + record_transformations (~.dlp.RecordTransformations): + Treat the dataset as structured. + Transformations can be applied to specific + locations within structured datasets, such as + transforming a column within a table. + transformation_error_handling (~.dlp.TransformationErrorHandling): + Mode for handling transformation errors. If left + unspecified, the default mode is + ``TransformationErrorHandling.ThrowError``. 
+ """ + + info_type_transformations = proto.Field( + proto.MESSAGE, + number=1, + oneof="transformation", + message="InfoTypeTransformations", + ) + + record_transformations = proto.Field( + proto.MESSAGE, + number=2, + oneof="transformation", + message="RecordTransformations", + ) + + transformation_error_handling = proto.Field( + proto.MESSAGE, number=3, message="TransformationErrorHandling", + ) + + +class TransformationErrorHandling(proto.Message): + r"""How to handle transformation errors during de-identification. A + transformation error occurs when the requested transformation is + incompatible with the data. For example, trying to de-identify an IP + address using a ``DateShift`` transformation would result in a + transformation error, since date info cannot be extracted from an IP + address. Information about any incompatible transformations, and how + they were handled, is returned in the response as part of the + ``TransformationOverviews``. + + Attributes: + throw_error (~.dlp.TransformationErrorHandling.ThrowError): + Throw an error + leave_untransformed (~.dlp.TransformationErrorHandling.LeaveUntransformed): + Ignore errors + """ + + class ThrowError(proto.Message): + r"""Throw an error and fail the request when a transformation + error occurs. + """ + + class LeaveUntransformed(proto.Message): + r"""Skips the data without modifying it if the requested transformation + would cause an error. For example, if a ``DateShift`` transformation + were applied an an IP address, this mode would leave the IP address + unchanged in the response. + """ + + throw_error = proto.Field( + proto.MESSAGE, number=1, oneof="mode", message=ThrowError, + ) + + leave_untransformed = proto.Field( + proto.MESSAGE, number=2, oneof="mode", message=LeaveUntransformed, + ) + + +class PrimitiveTransformation(proto.Message): + r"""A rule for transforming a value. 
+ + Attributes: + replace_config (~.dlp.ReplaceValueConfig): + Replace + redact_config (~.dlp.RedactConfig): + Redact + character_mask_config (~.dlp.CharacterMaskConfig): + Mask + crypto_replace_ffx_fpe_config (~.dlp.CryptoReplaceFfxFpeConfig): + Ffx-Fpe + fixed_size_bucketing_config (~.dlp.FixedSizeBucketingConfig): + Fixed size bucketing + bucketing_config (~.dlp.BucketingConfig): + Bucketing + replace_with_info_type_config (~.dlp.ReplaceWithInfoTypeConfig): + Replace with infotype + time_part_config (~.dlp.TimePartConfig): + Time extraction + crypto_hash_config (~.dlp.CryptoHashConfig): + Crypto + date_shift_config (~.dlp.DateShiftConfig): + Date Shift + crypto_deterministic_config (~.dlp.CryptoDeterministicConfig): + Deterministic Crypto + """ + + replace_config = proto.Field( + proto.MESSAGE, number=1, oneof="transformation", message="ReplaceValueConfig", + ) + + redact_config = proto.Field( + proto.MESSAGE, number=2, oneof="transformation", message="RedactConfig", + ) + + character_mask_config = proto.Field( + proto.MESSAGE, number=3, oneof="transformation", message="CharacterMaskConfig", + ) + + crypto_replace_ffx_fpe_config = proto.Field( + proto.MESSAGE, + number=4, + oneof="transformation", + message="CryptoReplaceFfxFpeConfig", + ) + + fixed_size_bucketing_config = proto.Field( + proto.MESSAGE, + number=5, + oneof="transformation", + message="FixedSizeBucketingConfig", + ) + + bucketing_config = proto.Field( + proto.MESSAGE, number=6, oneof="transformation", message="BucketingConfig", + ) + + replace_with_info_type_config = proto.Field( + proto.MESSAGE, + number=7, + oneof="transformation", + message="ReplaceWithInfoTypeConfig", + ) + + time_part_config = proto.Field( + proto.MESSAGE, number=8, oneof="transformation", message="TimePartConfig", + ) + + crypto_hash_config = proto.Field( + proto.MESSAGE, number=9, oneof="transformation", message="CryptoHashConfig", + ) + + date_shift_config = proto.Field( + proto.MESSAGE, number=11, oneof="transformation", 
message="DateShiftConfig", + ) + + crypto_deterministic_config = proto.Field( + proto.MESSAGE, + number=12, + oneof="transformation", + message="CryptoDeterministicConfig", + ) + + +class TimePartConfig(proto.Message): + r"""For use with ``Date``, ``Timestamp``, and ``TimeOfDay``, extract or + preserve a portion of the value. + + Attributes: + part_to_extract (~.dlp.TimePartConfig.TimePart): + The part of the time to keep. + """ + + class TimePart(proto.Enum): + r"""Components that make up time.""" + TIME_PART_UNSPECIFIED = 0 + YEAR = 1 + MONTH = 2 + DAY_OF_MONTH = 3 + DAY_OF_WEEK = 4 + WEEK_OF_YEAR = 5 + HOUR_OF_DAY = 6 + + part_to_extract = proto.Field(proto.ENUM, number=1, enum=TimePart,) + + +class CryptoHashConfig(proto.Message): + r"""Pseudonymization method that generates surrogates via + cryptographic hashing. Uses SHA-256. + The key size must be either 32 or 64 bytes. + Outputs a base64 encoded representation of the hashed output + (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). + Currently, only string and integer values can be hashed. See + https://cloud.google.com/dlp/docs/pseudonymization to learn + more. + + Attributes: + crypto_key (~.dlp.CryptoKey): + The key used by the hash function. + """ + + crypto_key = proto.Field(proto.MESSAGE, number=1, message="CryptoKey",) + + +class CryptoDeterministicConfig(proto.Message): + r"""Pseudonymization method that generates deterministic + encryption for the given input. Outputs a base64 encoded + representation of the encrypted output. Uses AES-SIV based on + the RFC https://tools.ietf.org/html/rfc5297. + + Attributes: + crypto_key (~.dlp.CryptoKey): + The key used by the encryption function. + surrogate_info_type (~.storage.InfoType): + The custom info type to annotate the surrogate with. This + annotation will be applied to the surrogate by prefixing it + with the name of the custom info type followed by the number + of characters comprising the surrogate. 
The following scheme + defines the format: {info type name}({surrogate character + count}):{surrogate} + + For example, if the name of custom info type is + 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full + replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting + content using the custom info type 'Surrogate'. This + facilitates reversal of the surrogate when it occurs in free + text. + + Note: For record transformations where the entire cell in a + table is being transformed, surrogates are not mandatory. + Surrogates are used to denote the location of the token and + are necessary for re-identification in free form text. + + In order for inspection to work properly, the name of this + info type must not occur naturally anywhere in your data; + otherwise, inspection may either + + - reverse a surrogate that does not correspond to an actual + identifier + - be unable to parse the surrogate and result in an error + + Therefore, choose your custom info type name carefully after + considering what your data looks like. One way to select a + name that has a high chance of yielding reliable detection + is to include one or more unicode characters that are highly + improbable to exist in your data. For example, assuming your + data is entered from a regular ASCII keyboard, the symbol + with the hex code point 29DD might be used like so: + ⧝MY_TOKEN_TYPE. + context (~.storage.FieldId): + A context may be used for higher security and maintaining + referential integrity such that the same identifier in two + different contexts will be given a distinct surrogate. The + context is appended to plaintext value being encrypted. On + decryption the provided context is validated against the + value used during encryption. If a context was provided + during encryption, same context must be provided during + decryption as well. + + If the context is not set, plaintext would be used as is for + encryption. 
If the context is set but: + + 1. there is no record present when transforming a given + value or + 2. the field is not present when transforming a given value, + + plaintext would be used as is for encryption. + + Note that case (1) is expected when an + ``InfoTypeTransformation`` is applied to both structured and + non-structured ``ContentItem``\ s. + """ + + crypto_key = proto.Field(proto.MESSAGE, number=1, message="CryptoKey",) + + surrogate_info_type = proto.Field( + proto.MESSAGE, number=2, message=storage.InfoType, + ) + + context = proto.Field(proto.MESSAGE, number=3, message=storage.FieldId,) + + +class ReplaceValueConfig(proto.Message): + r"""Replace each input value with a given ``Value``. + + Attributes: + new_value (~.dlp.Value): + Value to replace it with. + """ + + new_value = proto.Field(proto.MESSAGE, number=1, message=Value,) + + +class ReplaceWithInfoTypeConfig(proto.Message): + r"""Replace each matching finding with the name of the info_type.""" + + +class RedactConfig(proto.Message): + r"""Redact a given value. For example, if used with an + ``InfoTypeTransformation`` transforming PHONE_NUMBER, and input 'My + phone number is 206-555-0123', the output would be 'My phone number + is '. + """ + + +class CharsToIgnore(proto.Message): + r"""Characters to skip when doing deidentification of a value. + These will be left alone and skipped. + + Attributes: + characters_to_skip (str): + Characters to not transform when masking. + common_characters_to_ignore (~.dlp.CharsToIgnore.CommonCharsToIgnore): + Common characters to not transform when + masking. Useful to avoid removing punctuation. + """ + + class CommonCharsToIgnore(proto.Enum): + r"""Convenience enum for indication common characters to not + transform. 
+ """ + COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0 + NUMERIC = 1 + ALPHA_UPPER_CASE = 2 + ALPHA_LOWER_CASE = 3 + PUNCTUATION = 4 + WHITESPACE = 5 + + characters_to_skip = proto.Field(proto.STRING, number=1, oneof="characters") + + common_characters_to_ignore = proto.Field( + proto.ENUM, number=2, oneof="characters", enum=CommonCharsToIgnore, + ) + + +class CharacterMaskConfig(proto.Message): + r"""Partially mask a string by replacing a given number of characters + with a fixed character. Masking can start from the beginning or end + of the string. This can be used on data of any type (numbers, longs, + and so on) and when de-identifying structured data we'll attempt to + preserve the original data's type. (This allows you to take a long + like 123 and modify it to a string like \**3. + + Attributes: + masking_character (str): + Character to use to mask the sensitive values—for example, + ``*`` for an alphabetic string such as a name, or ``0`` for + a numeric string such as ZIP code or credit card number. + This string must have a length of 1. If not supplied, this + value defaults to ``*`` for strings, and ``0`` for digits. + number_to_mask (int): + Number of characters to mask. If not set, all + matching chars will be masked. Skipped + characters do not count towards this tally. + reverse_order (bool): + Mask characters in reverse order. For example, if + ``masking_character`` is ``0``, ``number_to_mask`` is + ``14``, and ``reverse_order`` is ``false``, then the input + string ``1234-5678-9012-3456`` is masked as + ``00000000000000-3456``. If ``masking_character`` is ``*``, + ``number_to_mask`` is ``3``, and ``reverse_order`` is + ``true``, then the string ``12345`` is masked as ``12***``. + characters_to_ignore (Sequence[~.dlp.CharsToIgnore]): + When masking a string, items in this list will be skipped + when replacing characters. 
For example, if the input string + is ``555-555-5555`` and you instruct Cloud DLP to skip ``-`` + and mask 5 characters with ``*``, Cloud DLP returns + ``***-**5-5555``. + """ + + masking_character = proto.Field(proto.STRING, number=1) + + number_to_mask = proto.Field(proto.INT32, number=2) + + reverse_order = proto.Field(proto.BOOL, number=3) + + characters_to_ignore = proto.RepeatedField( + proto.MESSAGE, number=4, message=CharsToIgnore, + ) + + +class FixedSizeBucketingConfig(proto.Message): + r"""Buckets values based on fixed size ranges. The Bucketing + transformation can provide all of this functionality, but requires + more configuration. This message is provided as a convenience to the + user for simple bucketing strategies. + + The transformed value will be a hyphenated string of + {lower_bound}-{upper_bound}, i.e if lower_bound = 10 and upper_bound + = 20 all values that are within this bucket will be replaced with + "10-20". + + This can be used on data of type: double, long. + + If the bound Value type differs from the type of data being + transformed, we will first attempt converting the type of the data + to be transformed to match the type of the bound before comparing. + + See https://cloud.google.com/dlp/docs/concepts-bucketing to learn + more. + + Attributes: + lower_bound (~.dlp.Value): + Required. Lower bound value of buckets. All values less than + ``lower_bound`` are grouped together into a single bucket; + for example if ``lower_bound`` = 10, then all values less + than 10 are replaced with the value "-10". + upper_bound (~.dlp.Value): + Required. Upper bound value of buckets. All values greater + than upper_bound are grouped together into a single bucket; + for example if ``upper_bound`` = 89, then all values greater + than 89 are replaced with the value "89+". + bucket_size (float): + Required. Size of each bucket (except for minimum and + maximum buckets). 
So if ``lower_bound`` = 10,
+            ``upper_bound`` = 89, and ``bucket_size`` = 10, then the
+            following buckets would be used: -10, 10-20, 20-30, 30-40,
+            40-50, 50-60, 60-70, 70-80, 80-89, 89+. Precision up to 2
+            decimals works.
+    """
+
+    lower_bound = proto.Field(proto.MESSAGE, number=1, message=Value,)
+
+    upper_bound = proto.Field(proto.MESSAGE, number=2, message=Value,)
+
+    bucket_size = proto.Field(proto.DOUBLE, number=3)
+
+
+class BucketingConfig(proto.Message):
+    r"""Generalization function that buckets values based on ranges. The
+    ranges and replacement values are dynamically provided by the user
+    for custom behavior, such as 1-30 -> LOW 31-65 -> MEDIUM 66-100 ->
+    HIGH This can be used on data of type: number, long, string,
+    timestamp. If the bound ``Value`` type differs from the type of data
+    being transformed, we will first attempt converting the type of the
+    data to be transformed to match the type of the bound before
+    comparing. See https://cloud.google.com/dlp/docs/concepts-bucketing
+    to learn more.
+
+    Attributes:
+        buckets (Sequence[~.dlp.BucketingConfig.Bucket]):
+            Set of buckets. Ranges must be non-
+            overlapping.
+    """
+
+    class Bucket(proto.Message):
+        r"""Bucket is represented as a range, along with replacement
+        values.
+
+        Attributes:
+            min (~.dlp.Value):
+                Lower bound of the range, inclusive. Type
+                should be the same as max if used.
+            max (~.dlp.Value):
+                Upper bound of the range, exclusive; type
+                must match min.
+            replacement_value (~.dlp.Value):
+                Required. Replacement value for this bucket.
+ """ + + min = proto.Field(proto.MESSAGE, number=1, message=Value,) + + max = proto.Field(proto.MESSAGE, number=2, message=Value,) + + replacement_value = proto.Field(proto.MESSAGE, number=3, message=Value,) + + buckets = proto.RepeatedField(proto.MESSAGE, number=1, message=Bucket,) + + +class CryptoReplaceFfxFpeConfig(proto.Message): + r"""Replaces an identifier with a surrogate using Format Preserving + Encryption (FPE) with the FFX mode of operation; however when used + in the ``ReidentifyContent`` API method, it serves the opposite + function by reversing the surrogate back into the original + identifier. The identifier must be encoded as ASCII. For a given + crypto key and context, the same identifier will be replaced with + the same surrogate. Identifiers must be at least two characters + long. In the case that the identifier is the empty string, it will + be skipped. See https://cloud.google.com/dlp/docs/pseudonymization + to learn more. + + Note: We recommend using CryptoDeterministicConfig for all use cases + which do not require preserving the input alphabet space and size, + plus warrant referential integrity. + + Attributes: + crypto_key (~.dlp.CryptoKey): + Required. The key used by the encryption + algorithm. + context (~.storage.FieldId): + The 'tweak', a context may be used for higher security since + the same identifier in two different contexts won't be given + the same surrogate. If the context is not set, a default + tweak will be used. + + If the context is set but: + + 1. there is no record present when transforming a given + value or + 2. the field is not present when transforming a given value, + + a default tweak will be used. + + Note that case (1) is expected when an + ``InfoTypeTransformation`` is applied to both structured and + non-structured ``ContentItem``\ s. Currently, the referenced + field may be of value type integer or string. 
+ + The tweak is constructed as a sequence of bytes in big + endian byte order such that: + + - a 64 bit integer is encoded followed by a single byte of + value 1 + - a string is encoded in UTF-8 format followed by a single + byte of value 2 + common_alphabet (~.dlp.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet): + Common alphabets. + custom_alphabet (str): + This is supported by mapping these to the alphanumeric + characters that the FFX mode natively supports. This happens + before/after encryption/decryption. Each character listed + must appear only once. Number of characters must be in the + range [2, 95]. This must be encoded as ASCII. The order of + characters does not matter. + radix (int): + The native way to select the alphabet. Must be in the range + [2, 95]. + surrogate_info_type (~.storage.InfoType): + The custom infoType to annotate the surrogate with. This + annotation will be applied to the surrogate by prefixing it + with the name of the custom infoType followed by the number + of characters comprising the surrogate. The following scheme + defines the format: + info_type_name(surrogate_character_count):surrogate + + For example, if the name of custom infoType is + 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full + replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting + content using the custom infoType + ```SurrogateType`` `__. + This facilitates reversal of the surrogate when it occurs in + free text. + + In order for inspection to work properly, the name of this + infoType must not occur naturally anywhere in your data; + otherwise, inspection may find a surrogate that does not + correspond to an actual identifier. Therefore, choose your + custom infoType name carefully after considering what your + data looks like. 
One way to select a name that has a high + chance of yielding reliable detection is to include one or + more unicode characters that are highly improbable to exist + in your data. For example, assuming your data is entered + from a regular ASCII keyboard, the symbol with the hex code + point 29DD might be used like so: ⧝MY_TOKEN_TYPE + """ + + class FfxCommonNativeAlphabet(proto.Enum): + r"""These are commonly used subsets of the alphabet that the FFX + mode natively supports. In the algorithm, the alphabet is + selected using the "radix". Therefore each corresponds to + particular radix. + """ + FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0 + NUMERIC = 1 + HEXADECIMAL = 2 + UPPER_CASE_ALPHA_NUMERIC = 3 + ALPHA_NUMERIC = 4 + + crypto_key = proto.Field(proto.MESSAGE, number=1, message="CryptoKey",) + + context = proto.Field(proto.MESSAGE, number=2, message=storage.FieldId,) + + common_alphabet = proto.Field( + proto.ENUM, number=4, oneof="alphabet", enum=FfxCommonNativeAlphabet, + ) + + custom_alphabet = proto.Field(proto.STRING, number=5, oneof="alphabet") + + radix = proto.Field(proto.INT32, number=6, oneof="alphabet") + + surrogate_info_type = proto.Field( + proto.MESSAGE, number=8, message=storage.InfoType, + ) + + +class CryptoKey(proto.Message): + r"""This is a data encryption key (DEK) (as opposed to + a key encryption key (KEK) stored by KMS). + When using KMS to wrap/unwrap DEKs, be sure to set an + appropriate IAM policy on the KMS CryptoKey (KEK) to ensure an + attacker cannot unwrap the data crypto key. 
+ + Attributes: + transient (~.dlp.TransientCryptoKey): + Transient crypto key + unwrapped (~.dlp.UnwrappedCryptoKey): + Unwrapped crypto key + kms_wrapped (~.dlp.KmsWrappedCryptoKey): + Kms wrapped key + """ + + transient = proto.Field( + proto.MESSAGE, number=1, oneof="source", message="TransientCryptoKey", + ) + + unwrapped = proto.Field( + proto.MESSAGE, number=2, oneof="source", message="UnwrappedCryptoKey", + ) + + kms_wrapped = proto.Field( + proto.MESSAGE, number=3, oneof="source", message="KmsWrappedCryptoKey", + ) + + +class TransientCryptoKey(proto.Message): + r"""Use this to have a random data crypto key generated. + It will be discarded after the request finishes. + + Attributes: + name (str): + Required. Name of the key. This is an arbitrary string used + to differentiate different keys. A unique key is generated + per name: two separate ``TransientCryptoKey`` protos share + the same generated key if their names are the same. When the + data crypto key is generated, this name is not used in any + way (repeating the api call will result in a different key + being generated). + """ + + name = proto.Field(proto.STRING, number=1) + + +class UnwrappedCryptoKey(proto.Message): + r"""Using raw keys is prone to security risks due to accidentally + leaking the key. Choose another type of key if possible. + + Attributes: + key (bytes): + Required. A 128/192/256 bit key. + """ + + key = proto.Field(proto.BYTES, number=1) + + +class KmsWrappedCryptoKey(proto.Message): + r"""Include to use an existing data crypto key wrapped by KMS. + The wrapped key must be a 128/192/256 bit key. + Authorization requires the following IAM permissions when + sending a request to perform a crypto transformation using a + kms-wrapped crypto key: dlp.kms.encrypt + + Attributes: + wrapped_key (bytes): + Required. The wrapped data crypto key. + crypto_key_name (str): + Required. The resource name of the KMS + CryptoKey to use for unwrapping. 
+ """ + + wrapped_key = proto.Field(proto.BYTES, number=1) + + crypto_key_name = proto.Field(proto.STRING, number=2) + + +class DateShiftConfig(proto.Message): + r"""Shifts dates by random number of days, with option to be + consistent for the same context. See + https://cloud.google.com/dlp/docs/concepts-date-shifting to + learn more. + + Attributes: + upper_bound_days (int): + Required. Range of shift in days. Actual + shift will be selected at random within this + range (inclusive ends). Negative means shift to + earlier in time. Must not be more than 365250 + days (1000 years) each direction. + For example, 3 means shift date to at most 3 + days into the future. + lower_bound_days (int): + Required. For example, -5 means shift date to + at most 5 days back in the past. + context (~.storage.FieldId): + Points to the field that contains the + context, for example, an entity id. If set, must + also set cryptoKey. If set, shift will be + consistent for the given context. + crypto_key (~.dlp.CryptoKey): + Causes the shift to be computed based on this key and the + context. This results in the same shift for the same context + and crypto_key. If set, must also set context. Can only be + applied to table items. + """ + + upper_bound_days = proto.Field(proto.INT32, number=1) + + lower_bound_days = proto.Field(proto.INT32, number=2) + + context = proto.Field(proto.MESSAGE, number=3, message=storage.FieldId,) + + crypto_key = proto.Field( + proto.MESSAGE, number=4, oneof="method", message=CryptoKey, + ) + + +class InfoTypeTransformations(proto.Message): + r"""A type of transformation that will scan unstructured text and apply + various ``PrimitiveTransformation``\ s to each finding, where the + transformation is applied to only values that were identified as a + specific info_type. + + Attributes: + transformations (Sequence[~.dlp.InfoTypeTransformations.InfoTypeTransformation]): + Required. Transformation for each infoType. 
+ Cannot specify more than one for a given + infoType. + """ + + class InfoTypeTransformation(proto.Message): + r"""A transformation to apply to text that is identified as a specific + info_type. + + Attributes: + info_types (Sequence[~.storage.InfoType]): + InfoTypes to apply the transformation to. An empty list will + cause this transformation to apply to all findings that + correspond to infoTypes that were requested in + ``InspectConfig``. + primitive_transformation (~.dlp.PrimitiveTransformation): + Required. Primitive transformation to apply + to the infoType. + """ + + info_types = proto.RepeatedField( + proto.MESSAGE, number=1, message=storage.InfoType, + ) + + primitive_transformation = proto.Field( + proto.MESSAGE, number=2, message=PrimitiveTransformation, + ) + + transformations = proto.RepeatedField( + proto.MESSAGE, number=1, message=InfoTypeTransformation, + ) + + +class FieldTransformation(proto.Message): + r"""The transformation to apply to the field. + + Attributes: + fields (Sequence[~.storage.FieldId]): + Required. Input field(s) to apply the + transformation to. + condition (~.dlp.RecordCondition): + Only apply the transformation if the condition evaluates to + true for the given ``RecordCondition``. The conditions are + allowed to reference fields that are not used in the actual + transformation. + + Example Use Cases: + + - Apply a different bucket transformation to an age column + if the zip code column for the same record is within a + specific range. + - Redact a field if the date of birth field is greater than + 85. + primitive_transformation (~.dlp.PrimitiveTransformation): + Apply the transformation to the entire field. + info_type_transformations (~.dlp.InfoTypeTransformations): + Treat the contents of the field as free text, and + selectively transform content that matches an ``InfoType``. 
+ """ + + fields = proto.RepeatedField(proto.MESSAGE, number=1, message=storage.FieldId,) + + condition = proto.Field(proto.MESSAGE, number=3, message="RecordCondition",) + + primitive_transformation = proto.Field( + proto.MESSAGE, + number=4, + oneof="transformation", + message=PrimitiveTransformation, + ) + + info_type_transformations = proto.Field( + proto.MESSAGE, + number=5, + oneof="transformation", + message=InfoTypeTransformations, + ) + + +class RecordTransformations(proto.Message): + r"""A type of transformation that is applied over structured data + such as a table. + + Attributes: + field_transformations (Sequence[~.dlp.FieldTransformation]): + Transform the record by applying various + field transformations. + record_suppressions (Sequence[~.dlp.RecordSuppression]): + Configuration defining which records get + suppressed entirely. Records that match any + suppression rule are omitted from the output. + """ + + field_transformations = proto.RepeatedField( + proto.MESSAGE, number=1, message=FieldTransformation, + ) + + record_suppressions = proto.RepeatedField( + proto.MESSAGE, number=2, message="RecordSuppression", + ) + + +class RecordSuppression(proto.Message): + r"""Configuration to suppress records whose suppression + conditions evaluate to true. + + Attributes: + condition (~.dlp.RecordCondition): + A condition that when it evaluates to true + will result in the record being evaluated to be + suppressed from the transformed content. + """ + + condition = proto.Field(proto.MESSAGE, number=1, message="RecordCondition",) + + +class RecordCondition(proto.Message): + r"""A condition for determining whether a transformation should + be applied to a field. + + Attributes: + expressions (~.dlp.RecordCondition.Expressions): + An expression. + """ + + class Condition(proto.Message): + r"""The field type of ``value`` and ``field`` do not need to match to be + considered equal, but not all comparisons are possible. 
EQUAL_TO and
+ NOT_EQUAL_TO attempt to compare even with incompatible types, but
+ all other comparisons are invalid with incompatible types. A
+ ``value`` of type:
+
+ - ``string`` can be compared against all other types
+ - ``boolean`` can only be compared against other booleans
+ - ``integer`` can be compared against doubles or a string if the
+ string value can be parsed as an integer.
+ - ``double`` can be compared against integers or a string if the
+ string can be parsed as a double.
+ - ``Timestamp`` can be compared against strings in RFC 3339 date
+ string format.
+ - ``TimeOfDay`` can be compared against timestamps and strings in
+ the format of 'HH:mm:ss'.
+
+ If we fail to compare due to type mismatch, a warning will be given
+ and the condition will evaluate to false.
+
+ Attributes:
+ field (~.storage.FieldId):
+ Required. Field within the record this
+ condition is evaluated against.
+ operator (~.dlp.RelationalOperator):
+ Required. Operator used to compare the field
+ or infoType to the value.
+ value (~.dlp.Value):
+ Value to compare against. [Mandatory, except for ``EXISTS``
+ tests.]
+ """
+
+ field = proto.Field(proto.MESSAGE, number=1, message=storage.FieldId,)
+
+ operator = proto.Field(proto.ENUM, number=3, enum="RelationalOperator",)
+
+ value = proto.Field(proto.MESSAGE, number=4, message=Value,)
+
+ class Conditions(proto.Message):
+ r"""A collection of conditions.
+
+ Attributes:
+ conditions (Sequence[~.dlp.RecordCondition.Condition]):
+ A collection of conditions.
+ """
+
+ conditions = proto.RepeatedField(
+ proto.MESSAGE, number=1, message="RecordCondition.Condition",
+ )
+
+ class Expressions(proto.Message):
+ r"""An expression, consisting of an operator and conditions.
+
+ Attributes:
+ logical_operator (~.dlp.RecordCondition.Expressions.LogicalOperator):
+ The operator to apply to the result of conditions. Default
+ and currently only supported value is ``AND``. 
+ conditions (~.dlp.RecordCondition.Conditions): + Conditions to apply to the expression. + """ + + class LogicalOperator(proto.Enum): + r"""Logical operators for conditional checks.""" + LOGICAL_OPERATOR_UNSPECIFIED = 0 + AND = 1 + + logical_operator = proto.Field( + proto.ENUM, number=1, enum="RecordCondition.Expressions.LogicalOperator", + ) + + conditions = proto.Field( + proto.MESSAGE, number=3, oneof="type", message="RecordCondition.Conditions", + ) + + expressions = proto.Field(proto.MESSAGE, number=3, message=Expressions,) + + +class TransformationOverview(proto.Message): + r"""Overview of the modifications that occurred. + + Attributes: + transformed_bytes (int): + Total size in bytes that were transformed in + some way. + transformation_summaries (Sequence[~.dlp.TransformationSummary]): + Transformations applied to the dataset. + """ + + transformed_bytes = proto.Field(proto.INT64, number=2) + + transformation_summaries = proto.RepeatedField( + proto.MESSAGE, number=3, message="TransformationSummary", + ) + + +class TransformationSummary(proto.Message): + r"""Summary of a single transformation. Only one of 'transformation', + 'field_transformation', or 'record_suppress' will be set. + + Attributes: + info_type (~.storage.InfoType): + Set if the transformation was limited to a + specific InfoType. + field (~.storage.FieldId): + Set if the transformation was limited to a + specific FieldId. + transformation (~.dlp.PrimitiveTransformation): + The specific transformation these stats apply + to. + field_transformations (Sequence[~.dlp.FieldTransformation]): + The field transformation that was applied. + If multiple field transformations are requested + for a single field, this list will contain all + of them; otherwise, only one is supplied. + record_suppress (~.dlp.RecordSuppression): + The specific suppression option these stats + apply to. 
+ results (Sequence[~.dlp.TransformationSummary.SummaryResult]): + Collection of all transformations that took + place or had an error. + transformed_bytes (int): + Total size in bytes that were transformed in + some way. + """ + + class TransformationResultCode(proto.Enum): + r"""Possible outcomes of transformations.""" + TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0 + SUCCESS = 1 + ERROR = 2 + + class SummaryResult(proto.Message): + r"""A collection that informs the user the number of times a particular + ``TransformationResultCode`` and error details occurred. + + Attributes: + count (int): + Number of transformations counted by this + result. + code (~.dlp.TransformationSummary.TransformationResultCode): + Outcome of the transformation. + details (str): + A place for warnings or errors to show up if + a transformation didn't work as expected. + """ + + count = proto.Field(proto.INT64, number=1) + + code = proto.Field( + proto.ENUM, number=2, enum="TransformationSummary.TransformationResultCode", + ) + + details = proto.Field(proto.STRING, number=3) + + info_type = proto.Field(proto.MESSAGE, number=1, message=storage.InfoType,) + + field = proto.Field(proto.MESSAGE, number=2, message=storage.FieldId,) + + transformation = proto.Field( + proto.MESSAGE, number=3, message=PrimitiveTransformation, + ) + + field_transformations = proto.RepeatedField( + proto.MESSAGE, number=5, message=FieldTransformation, + ) + + record_suppress = proto.Field(proto.MESSAGE, number=6, message=RecordSuppression,) + + results = proto.RepeatedField(proto.MESSAGE, number=4, message=SummaryResult,) + + transformed_bytes = proto.Field(proto.INT64, number=7) + + +class Schedule(proto.Message): + r"""Schedule for triggeredJobs. + + Attributes: + recurrence_period_duration (~.duration.Duration): + With this option a job is started a regular + periodic basis. For example: every day (86400 + seconds). 
+ A scheduled start time will be skipped if the + previous execution has not ended when its + scheduled time occurs. + This value must be set to a time duration + greater than or equal to 1 day and can be no + longer than 60 days. + """ + + recurrence_period_duration = proto.Field( + proto.MESSAGE, number=1, oneof="option", message=duration.Duration, + ) + + +class Manual(proto.Message): + r"""Job trigger option for hybrid jobs. Jobs must be manually + created and finished. + """ + + +class InspectTemplate(proto.Message): + r"""The inspectTemplate contains a configuration (set of types of + sensitive data to be detected) to be used anywhere you otherwise + would normally specify InspectConfig. See + https://cloud.google.com/dlp/docs/concepts-templates to learn + more. + + Attributes: + name (str): + Output only. The template name. + + The template will have one of the following formats: + ``projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID`` OR + ``organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID``; + display_name (str): + Display name (max 256 chars). + description (str): + Short description (max 256 chars). + create_time (~.timestamp.Timestamp): + Output only. The creation timestamp of an + inspectTemplate. + update_time (~.timestamp.Timestamp): + Output only. The last update timestamp of an + inspectTemplate. + inspect_config (~.dlp.InspectConfig): + The core content of the template. + Configuration of the scanning process. 
+ """ + + name = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + description = proto.Field(proto.STRING, number=3) + + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) + + inspect_config = proto.Field(proto.MESSAGE, number=6, message=InspectConfig,) + + +class DeidentifyTemplate(proto.Message): + r"""DeidentifyTemplates contains instructions on how to de- + dentify content. See https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + Attributes: + name (str): + Output only. The template name. + + The template will have one of the following formats: + ``projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID`` OR + ``organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID`` + display_name (str): + Display name (max 256 chars). + description (str): + Short description (max 256 chars). + create_time (~.timestamp.Timestamp): + Output only. The creation timestamp of an + inspectTemplate. + update_time (~.timestamp.Timestamp): + Output only. The last update timestamp of an + inspectTemplate. + deidentify_config (~.dlp.DeidentifyConfig): + ///////////// // The core content of the + template // /////////////// + """ + + name = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + description = proto.Field(proto.STRING, number=3) + + create_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=5, message=timestamp.Timestamp,) + + deidentify_config = proto.Field(proto.MESSAGE, number=6, message=DeidentifyConfig,) + + +class Error(proto.Message): + r"""Details information about an error encountered during job + execution or the results of an unsuccessful activation of the + JobTrigger. + + Attributes: + details (~.gr_status.Status): + Detailed error codes and messages. 
+ timestamps (Sequence[~.timestamp.Timestamp]): + The times the error occurred. + """ + + details = proto.Field(proto.MESSAGE, number=1, message=gr_status.Status,) + + timestamps = proto.RepeatedField( + proto.MESSAGE, number=2, message=timestamp.Timestamp, + ) + + +class JobTrigger(proto.Message): + r"""Contains a configuration to make dlp api calls on a repeating + basis. See https://cloud.google.com/dlp/docs/concepts-job- + triggers to learn more. + + Attributes: + name (str): + Unique resource name for the triggeredJob, assigned by the + service when the triggeredJob is created, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + display_name (str): + Display name (max 100 chars) + description (str): + User provided description (max 256 chars) + inspect_job (~.dlp.InspectJobConfig): + For inspect jobs, a snapshot of the + configuration. + triggers (Sequence[~.dlp.JobTrigger.Trigger]): + A list of triggers which will be OR'ed + together. Only one in the list needs to trigger + for a job to be started. The list may contain + only a single Schedule trigger and must have at + least one object. + errors (Sequence[~.dlp.Error]): + Output only. A stream of errors encountered + when the trigger was activated. Repeated errors + may result in the JobTrigger automatically being + paused. Will return the last 100 errors. + Whenever the JobTrigger is modified this list + will be cleared. + create_time (~.timestamp.Timestamp): + Output only. The creation timestamp of a + triggeredJob. + update_time (~.timestamp.Timestamp): + Output only. The last update timestamp of a + triggeredJob. + last_run_time (~.timestamp.Timestamp): + Output only. The timestamp of the last time + this trigger executed. + status (~.dlp.JobTrigger.Status): + Required. A status for this trigger. + """ + + class Status(proto.Enum): + r"""Whether the trigger is currently active. If PAUSED or + CANCELLED, no jobs will be created with this configuration. 
The + service may automatically pause triggers experiencing frequent + errors. To restart a job, set the status to HEALTHY after + correcting user errors. + """ + STATUS_UNSPECIFIED = 0 + HEALTHY = 1 + PAUSED = 2 + CANCELLED = 3 + + class Trigger(proto.Message): + r"""What event needs to occur for a new job to be started. + + Attributes: + schedule (~.dlp.Schedule): + Create a job on a repeating basis based on + the elapse of time. + manual (~.dlp.Manual): + For use with hybrid jobs. Jobs must be + manually created and finished. Early access + feature is in a pre-release state and might + change or have limited support. For more + information, see + https://cloud.google.com/products#product- + launch-stages. + """ + + schedule = proto.Field( + proto.MESSAGE, number=1, oneof="trigger", message=Schedule, + ) + + manual = proto.Field(proto.MESSAGE, number=2, oneof="trigger", message=Manual,) + + name = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + description = proto.Field(proto.STRING, number=3) + + inspect_job = proto.Field( + proto.MESSAGE, number=4, oneof="job", message="InspectJobConfig", + ) + + triggers = proto.RepeatedField(proto.MESSAGE, number=5, message=Trigger,) + + errors = proto.RepeatedField(proto.MESSAGE, number=6, message=Error,) + + create_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) + + update_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) + + last_run_time = proto.Field(proto.MESSAGE, number=9, message=timestamp.Timestamp,) + + status = proto.Field(proto.ENUM, number=10, enum=Status,) + + +class Action(proto.Message): + r"""A task to execute on the completion of a job. + See https://cloud.google.com/dlp/docs/concepts-actions to learn + more. + + Attributes: + save_findings (~.dlp.Action.SaveFindings): + Save resulting findings in a provided + location. + pub_sub (~.dlp.Action.PublishToPubSub): + Publish a notification to a pubsub topic. 
+ publish_summary_to_cscc (~.dlp.Action.PublishSummaryToCscc): + Publish summary to Cloud Security Command + Center (Alpha). + publish_findings_to_cloud_data_catalog (~.dlp.Action.PublishFindingsToCloudDataCatalog): + Publish findings to Cloud Datahub. + job_notification_emails (~.dlp.Action.JobNotificationEmails): + Enable email notification for project owners + and editors on job's completion/failure. + publish_to_stackdriver (~.dlp.Action.PublishToStackdriver): + Enable Stackdriver metric dlp.googleapis.com/finding_count. + """ + + class SaveFindings(proto.Message): + r"""If set, the detailed findings will be persisted to the + specified OutputStorageConfig. Only a single instance of this + action can be specified. + Compatible with: Inspect, Risk + + Attributes: + output_config (~.dlp.OutputStorageConfig): + Location to store findings outside of DLP. + """ + + output_config = proto.Field( + proto.MESSAGE, number=1, message=OutputStorageConfig, + ) + + class PublishToPubSub(proto.Message): + r"""Publish a message into given Pub/Sub topic when DlpJob has + completed. The message contains a single field, ``DlpJobName``, + which is equal to the finished job's + ```DlpJob.name`` `__. + Compatible with: Inspect, Risk + + Attributes: + topic (str): + Cloud Pub/Sub topic to send notifications to. + The topic must have given publishing access + rights to the DLP API service account executing + the long running DlpJob sending the + notifications. Format is + projects/{project}/topics/{topic}. + """ + + topic = proto.Field(proto.STRING, number=1) + + class PublishSummaryToCscc(proto.Message): + r"""Publish the result summary of a DlpJob to the Cloud Security + Command Center (CSCC Alpha). + This action is only available for projects which are parts of an + organization and whitelisted for the alpha Cloud Security + Command Center. + The action will publish count of finding instances and their + info types. 
The summary of findings will be persisted in CSCC + and are governed by CSCC service-specific policy, see + https://cloud.google.com/terms/service-terms Only a single + instance of this action can be specified. Compatible with: + Inspect + """ + + class PublishFindingsToCloudDataCatalog(proto.Message): + r"""Publish findings of a DlpJob to Cloud Data Catalog. Labels + summarizing the results of the DlpJob will be applied to the + entry for the resource scanned in Cloud Data Catalog. Any labels + previously written by another DlpJob will be deleted. InfoType + naming patterns are strictly enforced when using this feature. + Note that the findings will be persisted in Cloud Data Catalog + storage and are governed by Data Catalog service-specific + policy, see https://cloud.google.com/terms/service-terms + Only a single instance of this action can be specified and only + allowed if all resources being scanned are BigQuery tables. + Compatible with: Inspect + """ + + class JobNotificationEmails(proto.Message): + r"""Enable email notification to project owners and editors on + jobs's completion/failure. + """ + + class PublishToStackdriver(proto.Message): + r"""Enable Stackdriver metric dlp.googleapis.com/finding_count. This + will publish a metric to stack driver on each infotype requested and + how many findings were found for it. CustomDetectors will be + bucketed as 'Custom' under the Stackdriver label 'info_type'. 
+ """ + + save_findings = proto.Field( + proto.MESSAGE, number=1, oneof="action", message=SaveFindings, + ) + + pub_sub = proto.Field( + proto.MESSAGE, number=2, oneof="action", message=PublishToPubSub, + ) + + publish_summary_to_cscc = proto.Field( + proto.MESSAGE, number=3, oneof="action", message=PublishSummaryToCscc, + ) + + publish_findings_to_cloud_data_catalog = proto.Field( + proto.MESSAGE, + number=5, + oneof="action", + message=PublishFindingsToCloudDataCatalog, + ) + + job_notification_emails = proto.Field( + proto.MESSAGE, number=8, oneof="action", message=JobNotificationEmails, + ) + + publish_to_stackdriver = proto.Field( + proto.MESSAGE, number=9, oneof="action", message=PublishToStackdriver, + ) + + +class CreateInspectTemplateRequest(proto.Message): + r"""Request message for CreateInspectTemplate. + + Attributes: + parent (str): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + inspect_template (~.dlp.InspectTemplate): + Required. The InspectTemplate to create. + template_id (str): + The template id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field(proto.STRING, number=1) + + inspect_template = proto.Field(proto.MESSAGE, number=2, message=InspectTemplate,) + + template_id = proto.Field(proto.STRING, number=3) + + location_id = proto.Field(proto.STRING, number=4) + + +class UpdateInspectTemplateRequest(proto.Message): + r"""Request message for UpdateInspectTemplate. + + Attributes: + name (str): + Required. 
Resource name of organization and inspectTemplate + to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + inspect_template (~.dlp.InspectTemplate): + New InspectTemplate value. + update_mask (~.field_mask.FieldMask): + Mask to control which fields get updated. + """ + + name = proto.Field(proto.STRING, number=1) + + inspect_template = proto.Field(proto.MESSAGE, number=2, message=InspectTemplate,) + + update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + + +class GetInspectTemplateRequest(proto.Message): + r"""Request message for GetInspectTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListInspectTemplatesRequest(proto.Message): + r"""Request message for ListInspectTemplates. + + Attributes: + parent (str): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListInspectTemplates``. + page_size (int): + Size of the page, can be limited by server. + If zero server returns a page of max size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to time the template was + created. 
+ - ``update_time``: corresponds to time the template was + last updated. + - ``name``: corresponds to template's name. + - ``display_name``: corresponds to template's display name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_token = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + order_by = proto.Field(proto.STRING, number=4) + + location_id = proto.Field(proto.STRING, number=5) + + +class ListInspectTemplatesResponse(proto.Message): + r"""Response message for ListInspectTemplates. + + Attributes: + inspect_templates (Sequence[~.dlp.InspectTemplate]): + List of inspectTemplates, up to page_size in + ListInspectTemplatesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListInspectTemplates request. + """ + + @property + def raw_page(self): + return self + + inspect_templates = proto.RepeatedField( + proto.MESSAGE, number=1, message=InspectTemplate, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteInspectTemplateRequest(proto.Message): + r"""Request message for DeleteInspectTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name = proto.Field(proto.STRING, number=1) + + +class CreateJobTriggerRequest(proto.Message): + r"""Request message for CreateJobTrigger. + + Attributes: + parent (str): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + job_trigger (~.dlp.JobTrigger): + Required. The JobTrigger to create. 
+ trigger_id (str): + The trigger id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field(proto.STRING, number=1) + + job_trigger = proto.Field(proto.MESSAGE, number=2, message=JobTrigger,) + + trigger_id = proto.Field(proto.STRING, number=3) + + location_id = proto.Field(proto.STRING, number=4) + + +class ActivateJobTriggerRequest(proto.Message): + r"""Request message for ActivateJobTrigger. + + Attributes: + name (str): + Required. Resource name of the trigger to activate, for + example ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class UpdateJobTriggerRequest(proto.Message): + r"""Request message for UpdateJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + job_trigger (~.dlp.JobTrigger): + New JobTrigger value. + update_mask (~.field_mask.FieldMask): + Mask to control which fields get updated. + """ + + name = proto.Field(proto.STRING, number=1) + + job_trigger = proto.Field(proto.MESSAGE, number=2, message=JobTrigger,) + + update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + + +class GetJobTriggerRequest(proto.Message): + r"""Request message for GetJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class CreateDlpJobRequest(proto.Message): + r"""Request message for CreateDlpJobRequest. Used to initiate + long running jobs such as calculating risk metrics or inspecting + Google Cloud Storage. 
+ + Attributes: + parent (str): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + inspect_job (~.dlp.InspectJobConfig): + Set to control what and how to inspect. + risk_job (~.dlp.RiskAnalysisJobConfig): + Set to choose what metric to calculate. + job_id (str): + The job id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field(proto.STRING, number=1) + + inspect_job = proto.Field( + proto.MESSAGE, number=2, oneof="job", message="InspectJobConfig", + ) + + risk_job = proto.Field( + proto.MESSAGE, number=3, oneof="job", message=RiskAnalysisJobConfig, + ) + + job_id = proto.Field(proto.STRING, number=4) + + location_id = proto.Field(proto.STRING, number=5) + + +class ListJobTriggersRequest(proto.Message): + r"""Request message for ListJobTriggers. + + Attributes: + parent (str): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + page_token (str): + Page token to continue retrieval. Comes from previous call + to ListJobTriggers. ``order_by`` field must not change for + subsequent calls. + page_size (int): + Size of the page, can be limited by a server. + order_by (str): + Comma separated list of triggeredJob fields to order by, + followed by ``asc`` or ``desc`` postfix. This list is + case-insensitive, default sorting order is ascending, + redundant space characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to time the JobTrigger was + created. + - ``update_time``: corresponds to time the JobTrigger was + last updated. 
+ - ``last_run_time``: corresponds to the last time the + JobTrigger ran. + - ``name``: corresponds to JobTrigger's name. + - ``display_name``: corresponds to JobTrigger's display + name. + - ``status``: corresponds to JobTrigger's status. + filter (str): + Allows filtering. + + Supported syntax: + + - Filter expressions are made up of one or more + restrictions. + - Restrictions can be combined by ``AND`` or ``OR`` logical + operators. A sequence of restrictions implicitly uses + ``AND``. + - A restriction has the form of + ``{field} {operator} {value}``. + - Supported fields/values for inspect jobs: + + - ``status`` - HEALTHY|PAUSED|CANCELLED + - ``inspected_storage`` - + DATASTORE|CLOUD_STORAGE|BIGQUERY + - 'last_run_time\` - RFC 3339 formatted timestamp, + surrounded by quotation marks. Nanoseconds are + ignored. + - 'error_count' - Number of errors that have occurred + while running. + + - The operator must be ``=`` or ``!=`` for status and + inspected_storage. + + Examples: + + - inspected_storage = cloud_storage AND status = HEALTHY + - inspected_storage = cloud_storage OR inspected_storage = + bigquery + - inspected_storage = cloud_storage AND (state = PAUSED OR + state = HEALTHY) + - last_run_time > "2017-12-12T00:00:00+00:00" + + The length of this field should be no more than 500 + characters. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_token = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + order_by = proto.Field(proto.STRING, number=4) + + filter = proto.Field(proto.STRING, number=5) + + location_id = proto.Field(proto.STRING, number=7) + + +class ListJobTriggersResponse(proto.Message): + r"""Response message for ListJobTriggers. + + Attributes: + job_triggers (Sequence[~.dlp.JobTrigger]): + List of triggeredJobs, up to page_size in + ListJobTriggersRequest. 
+ next_page_token (str): + If the next page is available then the next + page token to be used in following + ListJobTriggers request. + """ + + @property + def raw_page(self): + return self + + job_triggers = proto.RepeatedField(proto.MESSAGE, number=1, message=JobTrigger,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteJobTriggerRequest(proto.Message): + r"""Request message for DeleteJobTrigger. + + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class InspectJobConfig(proto.Message): + r"""Controls what and how to inspect for findings. + + Attributes: + storage_config (~.storage.StorageConfig): + The data to scan. + inspect_config (~.dlp.InspectConfig): + How and what to scan for. + inspect_template_name (str): + If provided, will be used as the default for all values in + InspectConfig. ``inspect_config`` will be merged into the + values persisted as part of the template. + actions (Sequence[~.dlp.Action]): + Actions to execute at the completion of the + job. + """ + + storage_config = proto.Field( + proto.MESSAGE, number=1, message=storage.StorageConfig, + ) + + inspect_config = proto.Field(proto.MESSAGE, number=2, message=InspectConfig,) + + inspect_template_name = proto.Field(proto.STRING, number=3) + + actions = proto.RepeatedField(proto.MESSAGE, number=4, message=Action,) + + +class DlpJob(proto.Message): + r"""Combines all of the information about a DLP job. + + Attributes: + name (str): + The server-assigned name. + type (~.dlp.DlpJobType): + The type of job. + state (~.dlp.DlpJob.JobState): + State of a job. + risk_details (~.dlp.AnalyzeDataSourceRiskDetails): + Results from analyzing risk of a data source. + inspect_details (~.dlp.InspectDataSourceDetails): + Results from inspecting a data source. 
+ create_time (~.timestamp.Timestamp): + Time when the job was created. + start_time (~.timestamp.Timestamp): + Time when the job started. + end_time (~.timestamp.Timestamp): + Time when the job finished. + job_trigger_name (str): + If created by a job trigger, the resource + name of the trigger that instantiated the job. + errors (Sequence[~.dlp.Error]): + A stream of errors encountered running the + job. + """ + + class JobState(proto.Enum): + r"""Possible states of a job. New items may be added.""" + JOB_STATE_UNSPECIFIED = 0 + PENDING = 1 + RUNNING = 2 + DONE = 3 + CANCELED = 4 + FAILED = 5 + ACTIVE = 6 + + name = proto.Field(proto.STRING, number=1) + + type = proto.Field(proto.ENUM, number=2, enum="DlpJobType",) + + state = proto.Field(proto.ENUM, number=3, enum=JobState,) + + risk_details = proto.Field( + proto.MESSAGE, number=4, oneof="details", message=AnalyzeDataSourceRiskDetails, + ) + + inspect_details = proto.Field( + proto.MESSAGE, number=5, oneof="details", message=InspectDataSourceDetails, + ) + + create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp.Timestamp,) + + start_time = proto.Field(proto.MESSAGE, number=7, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=8, message=timestamp.Timestamp,) + + job_trigger_name = proto.Field(proto.STRING, number=10) + + errors = proto.RepeatedField(proto.MESSAGE, number=11, message=Error,) + + +class GetDlpJobRequest(proto.Message): + r"""The request message for [DlpJobs.GetDlpJob][]. + + Attributes: + name (str): + Required. The name of the DlpJob resource. + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListDlpJobsRequest(proto.Message): + r"""The request message for listing DLP jobs. + + Attributes: + parent (str): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + filter (str): + Allows filtering. 
+ + Supported syntax: + + - Filter expressions are made up of one or more + restrictions. + - Restrictions can be combined by ``AND`` or ``OR`` logical + operators. A sequence of restrictions implicitly uses + ``AND``. + - A restriction has the form of + ``{field} {operator} {value}``. + - Supported fields/values for inspect jobs: + + - ``state`` - PENDING|RUNNING|CANCELED|FINISHED|FAILED + - ``inspected_storage`` - + DATASTORE|CLOUD_STORAGE|BIGQUERY + - ``trigger_name`` - The resource name of the trigger + that created job. + - 'end_time\` - Corresponds to time the job finished. + - 'start_time\` - Corresponds to time the job finished. + + - Supported fields for risk analysis jobs: + + - ``state`` - RUNNING|CANCELED|FINISHED|FAILED + - 'end_time\` - Corresponds to time the job finished. + - 'start_time\` - Corresponds to time the job finished. + + - The operator must be ``=`` or ``!=``. + + Examples: + + - inspected_storage = cloud_storage AND state = done + - inspected_storage = cloud_storage OR inspected_storage = + bigquery + - inspected_storage = cloud_storage AND (state = done OR + state = canceled) + - end_time > "2017-12-12T00:00:00+00:00" + + The length of this field should be no more than 500 + characters. + page_size (int): + The standard list page size. + page_token (str): + The standard list page token. + type (~.dlp.DlpJobType): + The type of job. Defaults to ``DlpJobType.INSPECT`` + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc, end_time asc, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to time the job was created. + - ``end_time``: corresponds to time the job ended. + - ``name``: corresponds to job's name. + - ``state``: corresponds to ``state`` + location_id (str): + Deprecated. This field has no effect. 
+ """ + + parent = proto.Field(proto.STRING, number=4) + + filter = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + type = proto.Field(proto.ENUM, number=5, enum="DlpJobType",) + + order_by = proto.Field(proto.STRING, number=6) + + location_id = proto.Field(proto.STRING, number=7) + + +class ListDlpJobsResponse(proto.Message): + r"""The response message for listing DLP jobs. + + Attributes: + jobs (Sequence[~.dlp.DlpJob]): + A list of DlpJobs that matches the specified + filter in the request. + next_page_token (str): + The standard List next-page token. + """ + + @property + def raw_page(self): + return self + + jobs = proto.RepeatedField(proto.MESSAGE, number=1, message=DlpJob,) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class CancelDlpJobRequest(proto.Message): + r"""The request message for canceling a DLP job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be cancelled. + """ + + name = proto.Field(proto.STRING, number=1) + + +class FinishDlpJobRequest(proto.Message): + r"""The request message for finishing a DLP hybrid job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be cancelled. + """ + + name = proto.Field(proto.STRING, number=1) + + +class DeleteDlpJobRequest(proto.Message): + r"""The request message for deleting a DLP job. + + Attributes: + name (str): + Required. The name of the DlpJob resource to + be deleted. + """ + + name = proto.Field(proto.STRING, number=1) + + +class CreateDeidentifyTemplateRequest(proto.Message): + r"""Request message for CreateDeidentifyTemplate. + + Attributes: + parent (str): + Required. Parent resource name. 
+ + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + deidentify_template (~.dlp.DeidentifyTemplate): + Required. The DeidentifyTemplate to create. + template_id (str): + The template id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field(proto.STRING, number=1) + + deidentify_template = proto.Field( + proto.MESSAGE, number=2, message=DeidentifyTemplate, + ) + + template_id = proto.Field(proto.STRING, number=3) + + location_id = proto.Field(proto.STRING, number=4) + + +class UpdateDeidentifyTemplateRequest(proto.Message): + r"""Request message for UpdateDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + deidentify_template (~.dlp.DeidentifyTemplate): + New DeidentifyTemplate value. + update_mask (~.field_mask.FieldMask): + Mask to control which fields get updated. + """ + + name = proto.Field(proto.STRING, number=1) + + deidentify_template = proto.Field( + proto.MESSAGE, number=2, message=DeidentifyTemplate, + ) + + update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + + +class GetDeidentifyTemplateRequest(proto.Message): + r"""Request message for GetDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and deidentify + template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. 
+ """ + + name = proto.Field(proto.STRING, number=1) + + +class ListDeidentifyTemplatesRequest(proto.Message): + r"""Request message for ListDeidentifyTemplates. + + Attributes: + parent (str): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListDeidentifyTemplates``. + page_size (int): + Size of the page, can be limited by server. + If zero server returns a page of max size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to time the template was + created. + - ``update_time``: corresponds to time the template was + last updated. + - ``name``: corresponds to template's name. + - ``display_name``: corresponds to template's display name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_token = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + order_by = proto.Field(proto.STRING, number=4) + + location_id = proto.Field(proto.STRING, number=5) + + +class ListDeidentifyTemplatesResponse(proto.Message): + r"""Response message for ListDeidentifyTemplates. + + Attributes: + deidentify_templates (Sequence[~.dlp.DeidentifyTemplate]): + List of deidentify templates, up to page_size in + ListDeidentifyTemplatesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListDeidentifyTemplates request. 
+ """ + + @property + def raw_page(self): + return self + + deidentify_templates = proto.RepeatedField( + proto.MESSAGE, number=1, message=DeidentifyTemplate, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteDeidentifyTemplateRequest(proto.Message): + r"""Request message for DeleteDeidentifyTemplate. + + Attributes: + name (str): + Required. Resource name of the organization and deidentify + template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + """ + + name = proto.Field(proto.STRING, number=1) + + +class LargeCustomDictionaryConfig(proto.Message): + r"""Configuration for a custom dictionary created from a data source of + any size up to the maximum size defined in the + `limits `__ page. The artifacts + of dictionary creation are stored in the specified Google Cloud + Storage location. Consider using ``CustomInfoType.Dictionary`` for + smaller dictionaries that satisfy the size requirements. + + Attributes: + output_path (~.storage.CloudStoragePath): + Location to store dictionary artifacts in + Google Cloud Storage. These files will only be + accessible by project owners and the DLP API. If + any of these artifacts are modified, the + dictionary is considered invalid and can no + longer be used. + cloud_storage_file_set (~.storage.CloudStorageFileSet): + Set of files containing newline-delimited + lists of dictionary phrases. + big_query_field (~.storage.BigQueryField): + Field in a BigQuery table where each cell + represents a dictionary phrase. 
+ """ + + output_path = proto.Field( + proto.MESSAGE, number=1, message=storage.CloudStoragePath, + ) + + cloud_storage_file_set = proto.Field( + proto.MESSAGE, number=2, oneof="source", message=storage.CloudStorageFileSet, + ) + + big_query_field = proto.Field( + proto.MESSAGE, number=3, oneof="source", message=storage.BigQueryField, + ) + + +class LargeCustomDictionaryStats(proto.Message): + r"""Summary statistics of a custom dictionary. + + Attributes: + approx_num_phrases (int): + Approximate number of distinct phrases in the + dictionary. + """ + + approx_num_phrases = proto.Field(proto.INT64, number=1) + + +class StoredInfoTypeConfig(proto.Message): + r"""Configuration for stored infoTypes. All fields and subfield + are provided by the user. For more information, see + https://cloud.google.com/dlp/docs/creating-custom-infotypes. + + Attributes: + display_name (str): + Display name of the StoredInfoType (max 256 + characters). + description (str): + Description of the StoredInfoType (max 256 + characters). + large_custom_dictionary (~.dlp.LargeCustomDictionaryConfig): + StoredInfoType where findings are defined by + a dictionary of phrases. + dictionary (~.storage.CustomInfoType.Dictionary): + Store dictionary-based CustomInfoType. + regex (~.storage.CustomInfoType.Regex): + Store regular expression-based + StoredInfoType. + """ + + display_name = proto.Field(proto.STRING, number=1) + + description = proto.Field(proto.STRING, number=2) + + large_custom_dictionary = proto.Field( + proto.MESSAGE, number=3, oneof="type", message=LargeCustomDictionaryConfig, + ) + + dictionary = proto.Field( + proto.MESSAGE, + number=4, + oneof="type", + message=storage.CustomInfoType.Dictionary, + ) + + regex = proto.Field( + proto.MESSAGE, number=5, oneof="type", message=storage.CustomInfoType.Regex, + ) + + +class StoredInfoTypeStats(proto.Message): + r"""Statistics for a StoredInfoType. 
+ + Attributes: + large_custom_dictionary (~.dlp.LargeCustomDictionaryStats): + StoredInfoType where findings are defined by + a dictionary of phrases. + """ + + large_custom_dictionary = proto.Field( + proto.MESSAGE, number=1, oneof="type", message=LargeCustomDictionaryStats, + ) + + +class StoredInfoTypeVersion(proto.Message): + r"""Version of a StoredInfoType, including the configuration used + to build it, create timestamp, and current state. + + Attributes: + config (~.dlp.StoredInfoTypeConfig): + StoredInfoType configuration. + create_time (~.timestamp.Timestamp): + Create timestamp of the version. Read-only, + determined by the system when the version is + created. + state (~.dlp.StoredInfoTypeState): + Stored info type version state. Read-only, + updated by the system during dictionary + creation. + errors (Sequence[~.dlp.Error]): + Errors that occurred when creating this storedInfoType + version, or anomalies detected in the storedInfoType data + that render it unusable. Only the five most recent errors + will be displayed, with the most recent error appearing + first. + + For example, some of the data for stored custom dictionaries + is put in the user's Google Cloud Storage bucket, and if + this data is modified or deleted by the user or another + system, the dictionary becomes invalid. + + If any errors occur, fix the problem indicated by the error + message and use the UpdateStoredInfoType API method to + create another version of the storedInfoType to continue + using it, reusing the same ``config`` if it was not the + source of the error. + stats (~.dlp.StoredInfoTypeStats): + Statistics about this storedInfoType version. 
+ """ + + config = proto.Field(proto.MESSAGE, number=1, message=StoredInfoTypeConfig,) + + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + state = proto.Field(proto.ENUM, number=3, enum="StoredInfoTypeState",) + + errors = proto.RepeatedField(proto.MESSAGE, number=4, message=Error,) + + stats = proto.Field(proto.MESSAGE, number=5, message=StoredInfoTypeStats,) + + +class StoredInfoType(proto.Message): + r"""StoredInfoType resource message that contains information + about the current version and any pending updates. + + Attributes: + name (str): + Resource name. + current_version (~.dlp.StoredInfoTypeVersion): + Current version of the stored info type. + pending_versions (Sequence[~.dlp.StoredInfoTypeVersion]): + Pending versions of the stored info type. + Empty if no versions are pending. + """ + + name = proto.Field(proto.STRING, number=1) + + current_version = proto.Field( + proto.MESSAGE, number=2, message=StoredInfoTypeVersion, + ) + + pending_versions = proto.RepeatedField( + proto.MESSAGE, number=3, message=StoredInfoTypeVersion, + ) + + +class CreateStoredInfoTypeRequest(proto.Message): + r"""Request message for CreateStoredInfoType. + + Attributes: + parent (str): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + config (~.dlp.StoredInfoTypeConfig): + Required. Configuration of the storedInfoType + to create. + stored_info_type_id (str): + The storedInfoType ID can contain uppercase and lowercase + letters, numbers, and hyphens; that is, it must match the + regular expression: ``[a-zA-Z\\d-_]+``. The maximum length + is 100 characters. Can be empty to allow the system to + generate one. + location_id (str): + Deprecated. This field has no effect. 
+ """ + + parent = proto.Field(proto.STRING, number=1) + + config = proto.Field(proto.MESSAGE, number=2, message=StoredInfoTypeConfig,) + + stored_info_type_id = proto.Field(proto.STRING, number=3) + + location_id = proto.Field(proto.STRING, number=4) + + +class UpdateStoredInfoTypeRequest(proto.Message): + r"""Request message for UpdateStoredInfoType. + + Attributes: + name (str): + Required. Resource name of organization and storedInfoType + to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + config (~.dlp.StoredInfoTypeConfig): + Updated configuration for the storedInfoType. + If not provided, a new version of the + storedInfoType will be created with the existing + configuration. + update_mask (~.field_mask.FieldMask): + Mask to control which fields get updated. + """ + + name = proto.Field(proto.STRING, number=1) + + config = proto.Field(proto.MESSAGE, number=2, message=StoredInfoTypeConfig,) + + update_mask = proto.Field(proto.MESSAGE, number=3, message=field_mask.FieldMask,) + + +class GetStoredInfoTypeRequest(proto.Message): + r"""Request message for GetStoredInfoType. + + Attributes: + name (str): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + """ + + name = proto.Field(proto.STRING, number=1) + + +class ListStoredInfoTypesRequest(proto.Message): + r"""Request message for ListStoredInfoTypes. + + Attributes: + parent (str): + Required. Parent resource name. + + - Format:projects/[PROJECT-ID] + - Format:organizations/[ORGANIZATION-ID] + - Format:projects/[PROJECT-ID]/locations/[LOCATION-ID] + - Format:organizations/[ORGANIZATION-ID]/locations/[LOCATION-ID] + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListStoredInfoTypes``. 
+ page_size (int): + Size of the page, can be limited by server. + If zero server returns a page of max size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc, display_name, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to time the most recent + version of the resource was created. + - ``state``: corresponds to the state of the resource. + - ``name``: corresponds to resource name. + - ``display_name``: corresponds to info type's display + name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_token = proto.Field(proto.STRING, number=2) + + page_size = proto.Field(proto.INT32, number=3) + + order_by = proto.Field(proto.STRING, number=4) + + location_id = proto.Field(proto.STRING, number=5) + + +class ListStoredInfoTypesResponse(proto.Message): + r"""Response message for ListStoredInfoTypes. + + Attributes: + stored_info_types (Sequence[~.dlp.StoredInfoType]): + List of storedInfoTypes, up to page_size in + ListStoredInfoTypesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListStoredInfoTypes request. + """ + + @property + def raw_page(self): + return self + + stored_info_types = proto.RepeatedField( + proto.MESSAGE, number=1, message=StoredInfoType, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class DeleteStoredInfoTypeRequest(proto.Message): + r"""Request message for DeleteStoredInfoType. + + Attributes: + name (str): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. 
+ """ + + name = proto.Field(proto.STRING, number=1) + + +class HybridInspectJobTriggerRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the trigger to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + hybrid_item (~.dlp.HybridContentItem): + The item to inspect. + """ + + name = proto.Field(proto.STRING, number=1) + + hybrid_item = proto.Field(proto.MESSAGE, number=3, message="HybridContentItem",) + + +class HybridInspectDlpJobRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + hybrid_item (~.dlp.HybridContentItem): + The item to inspect. + """ + + name = proto.Field(proto.STRING, number=1) + + hybrid_item = proto.Field(proto.MESSAGE, number=3, message="HybridContentItem",) + + +class HybridContentItem(proto.Message): + r"""An individual hybrid item to inspect. Will be stored + temporarily during processing. + + Attributes: + item (~.dlp.ContentItem): + The item to inspect. + finding_details (~.dlp.HybridFindingDetails): + Supplementary information that will be added + to each finding. + """ + + item = proto.Field(proto.MESSAGE, number=1, message=ContentItem,) + + finding_details = proto.Field( + proto.MESSAGE, number=2, message="HybridFindingDetails", + ) + + +class HybridFindingDetails(proto.Message): + r"""Populate to associate additional data with each finding. + + Attributes: + container_details (~.dlp.Container): + Details about the container where the content + being inspected is from. + file_offset (int): + Offset in bytes of the line, from the + beginning of the file, where the finding is + located. 
Populate if the item being scanned is + only part of a bigger item, such as a shard of a + file and you want to track the absolute position + of the finding. + row_offset (int): + Offset of the row for tables. Populate if the + row(s) being scanned are part of a bigger + dataset and you want to keep track of their + absolute position. + table_options (~.storage.TableOptions): + If the container is a table, additional information to make + findings meaningful such as the columns that are primary + keys. If not known ahead of time, can also be set within + each inspect hybrid call and the two will be merged. Note + that identifying_fields will only be stored to BigQuery, and + only if the BigQuery action has been included. + labels (Sequence[~.dlp.HybridFindingDetails.LabelsEntry]): + Labels to represent user provided metadata about the data + being inspected. If configured by the job, some key values + may be required. The labels associated with ``Finding``'s + produced by hybrid inspection. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. 
+ + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + """ + + container_details = proto.Field(proto.MESSAGE, number=1, message=Container,) + + file_offset = proto.Field(proto.INT64, number=2) + + row_offset = proto.Field(proto.INT64, number=3) + + table_options = proto.Field(proto.MESSAGE, number=4, message=storage.TableOptions,) + + labels = proto.MapField(proto.STRING, proto.STRING, number=5) + + +class HybridInspectResponse(proto.Message): + r"""Quota exceeded errors will be thrown once quota has been met.""" + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/dlp_v2/types/storage.py b/google/cloud/dlp_v2/types/storage.py new file mode 100644 index 00000000..c7a1e455 --- /dev/null +++ b/google/cloud/dlp_v2/types/storage.py @@ -0,0 +1,1004 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import proto # type: ignore + + +from google.protobuf import timestamp_pb2 as timestamp # type: ignore + + +__protobuf__ = proto.module( + package="google.privacy.dlp.v2", + manifest={ + "Likelihood", + "FileType", + "InfoType", + "StoredType", + "CustomInfoType", + "FieldId", + "PartitionId", + "KindExpression", + "DatastoreOptions", + "CloudStorageRegexFileSet", + "CloudStorageOptions", + "CloudStorageFileSet", + "CloudStoragePath", + "BigQueryOptions", + "StorageConfig", + "HybridOptions", + "BigQueryKey", + "DatastoreKey", + "Key", + "RecordKey", + "BigQueryTable", + "BigQueryField", + "EntityId", + "TableOptions", + }, +) + + +class Likelihood(proto.Enum): + r"""Categorization of results based on how likely they are to + represent a match, based on the number of elements they contain + which imply a match. + """ + LIKELIHOOD_UNSPECIFIED = 0 + VERY_UNLIKELY = 1 + UNLIKELY = 2 + POSSIBLE = 3 + LIKELY = 4 + VERY_LIKELY = 5 + + +class FileType(proto.Enum): + r"""Definitions of file type groups to scan. New types will be + added to this list. + """ + FILE_TYPE_UNSPECIFIED = 0 + BINARY_FILE = 1 + TEXT_FILE = 2 + IMAGE = 3 + WORD = 5 + PDF = 6 + AVRO = 7 + CSV = 8 + TSV = 9 + + +class InfoType(proto.Message): + r"""Type of information detected by the API. + + Attributes: + name (str): + Name of the information type. Either a name of your choosing + when creating a CustomInfoType, or one of the names listed + at https://cloud.google.com/dlp/docs/infotypes-reference + when specifying a built-in type. InfoType names should + conform to the pattern ``[a-zA-Z0-9_]{1,64}``. + """ + + name = proto.Field(proto.STRING, number=1) + + +class StoredType(proto.Message): + r"""A reference to a StoredInfoType to use with scanning. + + Attributes: + name (str): + Resource name of the requested ``StoredInfoType``, for + example + ``organizations/433245324/storedInfoTypes/432452342`` or + ``projects/project-id/storedInfoTypes/432452342``. 
+ create_time (~.timestamp.Timestamp): + Timestamp indicating when the version of the + ``StoredInfoType`` used for inspection was created. + Output-only field, populated by the system. + """ + + name = proto.Field(proto.STRING, number=1) + + create_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + +class CustomInfoType(proto.Message): + r"""Custom information type provided by the user. Used to find + domain-specific sensitive information configurable to the data + in question. + + Attributes: + info_type (~.storage.InfoType): + CustomInfoType can either be a new infoType, or an extension + of built-in infoType, when the name matches one of existing + infoTypes and that infoType is specified in + ``InspectContent.info_types`` field. Specifying the latter + adds findings to the one detected by the system. If built-in + info type is not specified in ``InspectContent.info_types`` + list then the name is treated as a custom info type. + likelihood (~.storage.Likelihood): + Likelihood to return for this CustomInfoType. This base + value can be altered by a detection rule if the finding + meets the criteria specified by the rule. Defaults to + ``VERY_LIKELY`` if not specified. + dictionary (~.storage.CustomInfoType.Dictionary): + A list of phrases to detect as a + CustomInfoType. + regex (~.storage.CustomInfoType.Regex): + Regular expression based CustomInfoType. + surrogate_type (~.storage.CustomInfoType.SurrogateType): + Message for detecting output from + deidentification transformations that support + reversing. + stored_type (~.storage.StoredType): + Load an existing ``StoredInfoType`` resource for use in + ``InspectDataSource``. Not currently supported in + ``InspectContent``. + detection_rules (Sequence[~.storage.CustomInfoType.DetectionRule]): + Set of detection rules to apply to all findings of this + CustomInfoType. Rules are applied in order that they are + specified. Not supported for the ``surrogate_type`` + CustomInfoType. 
+ exclusion_type (~.storage.CustomInfoType.ExclusionType):
+ If set to EXCLUSION_TYPE_EXCLUDE this infoType will not
+ cause a finding to be returned. It still can be used for
+ rules matching.
+ """
+
+ class ExclusionType(proto.Enum):
+ r""""""
+ EXCLUSION_TYPE_UNSPECIFIED = 0
+ EXCLUSION_TYPE_EXCLUDE = 1
+
+ class Dictionary(proto.Message):
+ r"""Custom information type based on a dictionary of words or phrases.
+ This can be used to match sensitive information specific to the
+ data, such as a list of employee IDs or job titles.
+
+ Dictionary words are case-insensitive and all characters other than
+ letters and digits in the unicode `Basic Multilingual
+ Plane <https://en.wikipedia.org/wiki/Plane_%28Unicode%29#Basic_Multilingual_Plane>`__
+ will be replaced with whitespace when scanning for matches, so the
+ dictionary phrase "Sam Johnson" will match all three phrases "sam
+ johnson", "Sam, Johnson", and "Sam (Johnson)". Additionally, the
+ characters surrounding any match must be of a different type than
+ the adjacent characters within the word, so letters must be next to
+ non-letters and digits next to non-digits. For example, the
+ dictionary word "jen" will match the first three letters of the text
+ "jen123" but will return no matches for "jennifer".
+
+ Dictionary words containing a large number of characters that are
+ not letters or digits may result in unexpected findings because such
+ characters are treated as whitespace. The
+ `limits <https://cloud.google.com/dlp/docs/limits>`__ page contains
+ details about the size limits of dictionaries. For dictionaries that
+ do not fit within these constraints, consider using
+ ``LargeCustomDictionaryConfig`` in the ``StoredInfoType`` API.
+
+ Attributes:
+ word_list (~.storage.CustomInfoType.Dictionary.WordList):
+ List of words or phrases to search for.
+ cloud_storage_path (~.storage.CloudStoragePath):
+ Newline-delimited file of words in Cloud
+ Storage. Only a single file is accepted.
+ """
+
+ class WordList(proto.Message):
+ r"""Message defining a list of words or phrases to search for in
+ the data.
+
+ Attributes:
+ words (Sequence[str]):
+ Words or phrases defining the dictionary. The dictionary
+ must contain at least one phrase and every phrase must
+ contain at least 2 characters that are letters or digits.
+ [required]
+ """
+
+ words = proto.RepeatedField(proto.STRING, number=1)
+
+ word_list = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ oneof="source",
+ message="CustomInfoType.Dictionary.WordList",
+ )
+
+ cloud_storage_path = proto.Field(
+ proto.MESSAGE, number=3, oneof="source", message="CloudStoragePath",
+ )
+
+ class Regex(proto.Message):
+ r"""Message defining a custom regular expression.
+
+ Attributes:
+ pattern (str):
+ Pattern defining the regular expression. Its
+ syntax
+ (https://github.com/google/re2/wiki/Syntax) can
+ be found under the google/re2 repository on
+ GitHub.
+ group_indexes (Sequence[int]):
+ The index of the submatch to extract as
+ findings. When not specified, the entire match
+ is returned. No more than 3 may be included.
+ """
+
+ pattern = proto.Field(proto.STRING, number=1)
+
+ group_indexes = proto.RepeatedField(proto.INT32, number=2)
+
+ class SurrogateType(proto.Message):
+ r"""Message for detecting output from deidentification transformations
+ such as
+ ```CryptoReplaceFfxFpeConfig`` <https://cloud.google.com/dlp/docs/reference/rest/v2/organizations.deidentifyTemplates#cryptoreplaceffxfpeconfig>`__.
+ These types of transformations are those that perform
+ pseudonymization, thereby producing a "surrogate" as output. This
+ should be used in conjunction with a field on the transformation
+ such as ``surrogate_info_type``. This CustomInfoType does not
+ support the use of ``detection_rules``.
+ """
+
+ class DetectionRule(proto.Message):
+ r"""Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a
+ ``CustomInfoType`` to alter behavior under certain circumstances,
+ depending on the specific details of the rule. Not supported for the
+ ``surrogate_type`` custom infoType.
+
+ Attributes:
+ hotword_rule (~.storage.CustomInfoType.DetectionRule.HotwordRule):
+ Hotword-based detection rule.
+ """ + + class Proximity(proto.Message): + r"""Message for specifying a window around a finding to apply a + detection rule. + + Attributes: + window_before (int): + Number of characters before the finding to + consider. + window_after (int): + Number of characters after the finding to + consider. + """ + + window_before = proto.Field(proto.INT32, number=1) + + window_after = proto.Field(proto.INT32, number=2) + + class LikelihoodAdjustment(proto.Message): + r"""Message for specifying an adjustment to the likelihood of a + finding as part of a detection rule. + + Attributes: + fixed_likelihood (~.storage.Likelihood): + Set the likelihood of a finding to a fixed + value. + relative_likelihood (int): + Increase or decrease the likelihood by the specified number + of levels. For example, if a finding would be ``POSSIBLE`` + without the detection rule and ``relative_likelihood`` is 1, + then it is upgraded to ``LIKELY``, while a value of -1 would + downgrade it to ``UNLIKELY``. Likelihood may never drop + below ``VERY_UNLIKELY`` or exceed ``VERY_LIKELY``, so + applying an adjustment of 1 followed by an adjustment of -1 + when base likelihood is ``VERY_LIKELY`` will result in a + final likelihood of ``LIKELY``. + """ + + fixed_likelihood = proto.Field( + proto.ENUM, number=1, oneof="adjustment", enum="Likelihood", + ) + + relative_likelihood = proto.Field(proto.INT32, number=2, oneof="adjustment") + + class HotwordRule(proto.Message): + r"""The rule that adjusts the likelihood of findings within a + certain proximity of hotwords. + + Attributes: + hotword_regex (~.storage.CustomInfoType.Regex): + Regular expression pattern defining what + qualifies as a hotword. + proximity (~.storage.CustomInfoType.DetectionRule.Proximity): + Proximity of the finding within which the + entire hotword must reside. The total length of + the window cannot exceed 1000 characters. 
Note + that the finding itself will be included in the + window, so that hotwords may be used to match + substrings of the finding itself. For example, + the certainty of a phone number regex "\(\d{3}\) + \d{3}-\d{4}" could be adjusted upwards if the + area code is known to be the local area code of + a company office using the hotword regex + "\(xxx\)", where "xxx" is the area code in + question. + likelihood_adjustment (~.storage.CustomInfoType.DetectionRule.LikelihoodAdjustment): + Likelihood adjustment to apply to all + matching findings. + """ + + hotword_regex = proto.Field( + proto.MESSAGE, number=1, message="CustomInfoType.Regex", + ) + + proximity = proto.Field( + proto.MESSAGE, + number=2, + message="CustomInfoType.DetectionRule.Proximity", + ) + + likelihood_adjustment = proto.Field( + proto.MESSAGE, + number=3, + message="CustomInfoType.DetectionRule.LikelihoodAdjustment", + ) + + hotword_rule = proto.Field( + proto.MESSAGE, + number=1, + oneof="type", + message="CustomInfoType.DetectionRule.HotwordRule", + ) + + info_type = proto.Field(proto.MESSAGE, number=1, message=InfoType,) + + likelihood = proto.Field(proto.ENUM, number=6, enum="Likelihood",) + + dictionary = proto.Field(proto.MESSAGE, number=2, oneof="type", message=Dictionary,) + + regex = proto.Field(proto.MESSAGE, number=3, oneof="type", message=Regex,) + + surrogate_type = proto.Field( + proto.MESSAGE, number=4, oneof="type", message=SurrogateType, + ) + + stored_type = proto.Field( + proto.MESSAGE, number=5, oneof="type", message=StoredType, + ) + + detection_rules = proto.RepeatedField( + proto.MESSAGE, number=7, message=DetectionRule, + ) + + exclusion_type = proto.Field(proto.ENUM, number=8, enum=ExclusionType,) + + +class FieldId(proto.Message): + r"""General identifier of a data field in a storage service. + + Attributes: + name (str): + Name describing the field. + """ + + name = proto.Field(proto.STRING, number=1) + + +class PartitionId(proto.Message): + r"""Datastore partition ID. 
+ A partition ID identifies a grouping of entities. The grouping + is always by project and namespace, however the namespace ID may + be empty. + A partition ID contains several dimensions: + project ID and namespace ID. + + Attributes: + project_id (str): + The ID of the project to which the entities + belong. + namespace_id (str): + If not empty, the ID of the namespace to + which the entities belong. + """ + + project_id = proto.Field(proto.STRING, number=2) + + namespace_id = proto.Field(proto.STRING, number=4) + + +class KindExpression(proto.Message): + r"""A representation of a Datastore kind. + + Attributes: + name (str): + The name of the kind. + """ + + name = proto.Field(proto.STRING, number=1) + + +class DatastoreOptions(proto.Message): + r"""Options defining a data set within Google Cloud Datastore. + + Attributes: + partition_id (~.storage.PartitionId): + A partition ID identifies a grouping of + entities. The grouping is always by project and + namespace, however the namespace ID may be + empty. + kind (~.storage.KindExpression): + The kind to process. + """ + + partition_id = proto.Field(proto.MESSAGE, number=1, message=PartitionId,) + + kind = proto.Field(proto.MESSAGE, number=2, message=KindExpression,) + + +class CloudStorageRegexFileSet(proto.Message): + r"""Message representing a set of files in a Cloud Storage bucket. + Regular expressions are used to allow fine-grained control over + which files in the bucket to include. + + Included files are those that match at least one item in + ``include_regex`` and do not match any items in ``exclude_regex``. + Note that a file that matches items from both lists will *not* be + included. For a match to occur, the entire file path (i.e., + everything in the url after the bucket name) must match the regular + expression. 
+
+ For example, given the input
+ ``{bucket_name: "mybucket", include_regex: ["directory1/.*"], exclude_regex: ["directory1/excluded.*"]}``:
+
+ - ``gs://mybucket/directory1/myfile`` will be included
+ - ``gs://mybucket/directory1/directory2/myfile`` will be included
+ (``.*`` matches across ``/``)
+ - ``gs://mybucket/directory0/directory1/myfile`` will *not* be
+ included (the full path doesn't match any items in
+ ``include_regex``)
+ - ``gs://mybucket/directory1/excludedfile`` will *not* be included
+ (the path matches an item in ``exclude_regex``)
+
+ If ``include_regex`` is left empty, it will match all files by
+ default (this is equivalent to setting ``include_regex: [".*"]``).
+
+ Some other common use cases:
+
+ - ``{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}`` will
+ include all files in ``mybucket`` except for .pdf files
+ - ``{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}``
+ will include all files directly under
+ ``gs://mybucket/directory/``, without matching across ``/``
+
+ Attributes:
+ bucket_name (str):
+ The name of a Cloud Storage bucket. Required.
+ include_regex (Sequence[str]):
+ A list of regular expressions matching file paths to
+ include. All files in the bucket that match at least one of
+ these regular expressions will be included in the set of
+ files, except for those that also match an item in
+ ``exclude_regex``. Leaving this field empty will match all
+ files by default (this is equivalent to including ``.*`` in
+ the list).
+
+ Regular expressions use RE2
+ `syntax <https://github.com/google/re2/wiki/Syntax>`__; a
+ guide can be found under the google/re2 repository on
+ GitHub.
+ exclude_regex (Sequence[str]):
+ A list of regular expressions matching file paths to
+ exclude. All files in the bucket that match at least one of
+ these regular expressions will be excluded from the scan.
+
+ Regular expressions use RE2
+ `syntax <https://github.com/google/re2/wiki/Syntax>`__; a
+ guide can be found under the google/re2 repository on
+ GitHub.
+ """ + + bucket_name = proto.Field(proto.STRING, number=1) + + include_regex = proto.RepeatedField(proto.STRING, number=2) + + exclude_regex = proto.RepeatedField(proto.STRING, number=3) + + +class CloudStorageOptions(proto.Message): + r"""Options defining a file or a set of files within a Google + Cloud Storage bucket. + + Attributes: + file_set (~.storage.CloudStorageOptions.FileSet): + The set of one or more files to scan. + bytes_limit_per_file (int): + Max number of bytes to scan from a file. If a scanned file's + size is bigger than this value then the rest of the bytes + are omitted. Only one of bytes_limit_per_file and + bytes_limit_per_file_percent can be specified. + bytes_limit_per_file_percent (int): + Max percentage of bytes to scan from a file. The rest are + omitted. The number of bytes scanned is rounded down. Must + be between 0 and 100, inclusively. Both 0 and 100 means no + limit. Defaults to 0. Only one of bytes_limit_per_file and + bytes_limit_per_file_percent can be specified. + file_types (Sequence[~.storage.FileType]): + List of file type groups to include in the scan. If empty, + all files are scanned and available data format processors + are applied. In addition, the binary content of the selected + files is always scanned as well. Images are scanned only as + binary if the specified region does not support image + inspection and no file_types were specified. Image + inspection is restricted to 'global', 'us', 'asia', and + 'europe'. + sample_method (~.storage.CloudStorageOptions.SampleMethod): + + files_limit_percent (int): + Limits the number of files to scan to this + percentage of the input FileSet. Number of files + scanned is rounded down. Must be between 0 and + 100, inclusively. Both 0 and 100 means no limit. + Defaults to 0. + """ + + class SampleMethod(proto.Enum): + r"""How to sample bytes if not all bytes are scanned. Meaningful only + when used in conjunction with bytes_limit_per_file. 
If not
+ specified, scanning would start from the top.
+ """
+ SAMPLE_METHOD_UNSPECIFIED = 0
+ TOP = 1
+ RANDOM_START = 2
+
+ class FileSet(proto.Message):
+ r"""Set of files to scan.
+
+ Attributes:
+ url (str):
+ The Cloud Storage url of the file(s) to scan, in the format
+ ``gs://<bucket>/<path>``. Trailing wildcard in the path is
+ allowed.
+
+ If the url ends in a trailing slash, the bucket or directory
+ represented by the url will be scanned non-recursively
+ (content in sub-directories will not be scanned). This means
+ that ``gs://mybucket/`` is equivalent to
+ ``gs://mybucket/*``, and ``gs://mybucket/directory/`` is
+ equivalent to ``gs://mybucket/directory/*``.
+
+ Exactly one of ``url`` or ``regex_file_set`` must be set.
+ regex_file_set (~.storage.CloudStorageRegexFileSet):
+ The regex-filtered set of files to scan. Exactly one of
+ ``url`` or ``regex_file_set`` must be set.
+ """
+
+ url = proto.Field(proto.STRING, number=1)
+
+ regex_file_set = proto.Field(
+ proto.MESSAGE, number=2, message=CloudStorageRegexFileSet,
+ )
+
+ file_set = proto.Field(proto.MESSAGE, number=1, message=FileSet,)
+
+ bytes_limit_per_file = proto.Field(proto.INT64, number=4)
+
+ bytes_limit_per_file_percent = proto.Field(proto.INT32, number=8)
+
+ file_types = proto.RepeatedField(proto.ENUM, number=5, enum="FileType",)
+
+ sample_method = proto.Field(proto.ENUM, number=6, enum=SampleMethod,)
+
+ files_limit_percent = proto.Field(proto.INT32, number=7)
+
+
+class CloudStorageFileSet(proto.Message):
+ r"""Message representing a set of files in Cloud Storage.
+
+ Attributes:
+ url (str):
+ The url, in the format ``gs://<bucket>/<path>``. Trailing
+ wildcard in the path is allowed.
+ """
+
+ url = proto.Field(proto.STRING, number=1)
+
+
+class CloudStoragePath(proto.Message):
+ r"""Message representing a single file or path in Cloud Storage.
+
+ Attributes:
+ path (str):
+ A url representing a file or path (no wildcards) in Cloud
+ Storage.
Example: gs://[BUCKET_NAME]/dictionary.txt + """ + + path = proto.Field(proto.STRING, number=1) + + +class BigQueryOptions(proto.Message): + r"""Options defining BigQuery table and row identifiers. + + Attributes: + table_reference (~.storage.BigQueryTable): + Complete BigQuery table reference. + identifying_fields (Sequence[~.storage.FieldId]): + Table fields that may uniquely identify a row within the + table. When ``actions.saveFindings.outputConfig.table`` is + specified, the values of columns specified here are + available in the output table under + ``location.content_locations.record_location.record_key.id_values``. + Nested fields such as ``person.birthdate.year`` are allowed. + rows_limit (int): + Max number of rows to scan. If the table has more rows than + this value, the rest of the rows are omitted. If not set, or + if set to 0, all rows will be scanned. Only one of + rows_limit and rows_limit_percent can be specified. Cannot + be used in conjunction with TimespanConfig. + rows_limit_percent (int): + Max percentage of rows to scan. The rest are omitted. The + number of rows scanned is rounded down. Must be between 0 + and 100, inclusively. Both 0 and 100 means no limit. + Defaults to 0. Only one of rows_limit and rows_limit_percent + can be specified. Cannot be used in conjunction with + TimespanConfig. + sample_method (~.storage.BigQueryOptions.SampleMethod): + + excluded_fields (Sequence[~.storage.FieldId]): + References to fields excluded from scanning. + This allows you to skip inspection of entire + columns which you know have no findings. + """ + + class SampleMethod(proto.Enum): + r"""How to sample rows if not all rows are scanned. Meaningful only when + used in conjunction with either rows_limit or rows_limit_percent. If + not specified, scanning would start from the top. 
+ """ + SAMPLE_METHOD_UNSPECIFIED = 0 + TOP = 1 + RANDOM_START = 2 + + table_reference = proto.Field(proto.MESSAGE, number=1, message="BigQueryTable",) + + identifying_fields = proto.RepeatedField(proto.MESSAGE, number=2, message=FieldId,) + + rows_limit = proto.Field(proto.INT64, number=3) + + rows_limit_percent = proto.Field(proto.INT32, number=6) + + sample_method = proto.Field(proto.ENUM, number=4, enum=SampleMethod,) + + excluded_fields = proto.RepeatedField(proto.MESSAGE, number=5, message=FieldId,) + + +class StorageConfig(proto.Message): + r"""Shared message indicating Cloud storage type. + + Attributes: + datastore_options (~.storage.DatastoreOptions): + Google Cloud Datastore options. + cloud_storage_options (~.storage.CloudStorageOptions): + Google Cloud Storage options. + big_query_options (~.storage.BigQueryOptions): + BigQuery options. + hybrid_options (~.storage.HybridOptions): + Hybrid inspection options. + Early access feature is in a pre-release state + and might change or have limited support. For + more information, see + https://cloud.google.com/products#product- + launch-stages. + timespan_config (~.storage.StorageConfig.TimespanConfig): + + """ + + class TimespanConfig(proto.Message): + r"""Configuration of the timespan of the items to include in + scanning. Currently only supported when inspecting Google Cloud + Storage and BigQuery. + + Attributes: + start_time (~.timestamp.Timestamp): + Exclude files or rows older than this value. + end_time (~.timestamp.Timestamp): + Exclude files or rows newer than this value. + If set to zero, no upper time limit is applied. + timestamp_field (~.storage.FieldId): + Specification of the field containing the timestamp of + scanned items. Used for data sources like Datastore and + BigQuery. + + For BigQuery: Required to filter out rows based on the given + start and end times. If not specified and the table was + modified between the given start and end times, the entire + table will be scanned. 
The valid data types of the timestamp + field are: ``INTEGER``, ``DATE``, ``TIMESTAMP``, or + ``DATETIME`` BigQuery column. + + For Datastore. Valid data types of the timestamp field are: + ``TIMESTAMP``. Datastore entity will be scanned if the + timestamp property does not exist or its value is empty or + invalid. + enable_auto_population_of_timespan_config (bool): + When the job is started by a JobTrigger we will + automatically figure out a valid start_time to avoid + scanning files that have not been modified since the last + time the JobTrigger executed. This will be based on the time + of the execution of the last run of the JobTrigger. + """ + + start_time = proto.Field(proto.MESSAGE, number=1, message=timestamp.Timestamp,) + + end_time = proto.Field(proto.MESSAGE, number=2, message=timestamp.Timestamp,) + + timestamp_field = proto.Field(proto.MESSAGE, number=3, message=FieldId,) + + enable_auto_population_of_timespan_config = proto.Field(proto.BOOL, number=4) + + datastore_options = proto.Field( + proto.MESSAGE, number=2, oneof="type", message=DatastoreOptions, + ) + + cloud_storage_options = proto.Field( + proto.MESSAGE, number=3, oneof="type", message=CloudStorageOptions, + ) + + big_query_options = proto.Field( + proto.MESSAGE, number=4, oneof="type", message=BigQueryOptions, + ) + + hybrid_options = proto.Field( + proto.MESSAGE, number=9, oneof="type", message="HybridOptions", + ) + + timespan_config = proto.Field(proto.MESSAGE, number=6, message=TimespanConfig,) + + +class HybridOptions(proto.Message): + r"""Configuration to control jobs where the content being + inspected is outside of Google Cloud Platform. + + Attributes: + description (str): + A short description of where the data is + coming from. Will be stored once in the job. 256 + max length. + required_finding_label_keys (Sequence[str]): + These are labels that each inspection request must include + within their 'finding_labels' map. 
Request may contain + others, but any missing one of these will be rejected. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + No more than 10 keys can be required. + labels (Sequence[~.storage.HybridOptions.LabelsEntry]): + To organize findings, these labels will be added to each + finding. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + table_options (~.storage.TableOptions): + If the container is a table, additional + information to make findings meaningful such as + the columns that are primary keys. + """ + + description = proto.Field(proto.STRING, number=1) + + required_finding_label_keys = proto.RepeatedField(proto.STRING, number=2) + + labels = proto.MapField(proto.STRING, proto.STRING, number=3) + + table_options = proto.Field(proto.MESSAGE, number=4, message="TableOptions",) + + +class BigQueryKey(proto.Message): + r"""Row key for identifying a record in BigQuery table. + + Attributes: + table_reference (~.storage.BigQueryTable): + Complete BigQuery table reference. + row_number (int): + Row number inferred at the time the table was scanned. This + value is nondeterministic, cannot be queried, and may be + null for inspection jobs. To locate findings within a table, + specify + ``inspect_job.storage_config.big_query_options.identifying_fields`` + in ``CreateDlpJobRequest``. 
+ """ + + table_reference = proto.Field(proto.MESSAGE, number=1, message="BigQueryTable",) + + row_number = proto.Field(proto.INT64, number=2) + + +class DatastoreKey(proto.Message): + r"""Record key for a finding in Cloud Datastore. + + Attributes: + entity_key (~.storage.Key): + Datastore entity key. + """ + + entity_key = proto.Field(proto.MESSAGE, number=1, message="Key",) + + +class Key(proto.Message): + r"""A unique identifier for a Datastore entity. + If a key's partition ID or any of its path kinds or names are + reserved/read-only, the key is reserved/read-only. + A reserved/read-only key is forbidden in certain documented + contexts. + + Attributes: + partition_id (~.storage.PartitionId): + Entities are partitioned into subsets, + currently identified by a project ID and + namespace ID. Queries are scoped to a single + partition. + path (Sequence[~.storage.Key.PathElement]): + The entity path. An entity path consists of one or more + elements composed of a kind and a string or numerical + identifier, which identify entities. The first element + identifies a *root entity*, the second element identifies a + *child* of the root entity, the third element identifies a + child of the second entity, and so forth. The entities + identified by all prefixes of the path are called the + element's *ancestors*. + + A path can never be empty, and a path can have at most 100 + elements. + """ + + class PathElement(proto.Message): + r"""A (kind, ID/name) pair used to construct a key path. + If either name or ID is set, the element is complete. If neither + is set, the element is incomplete. + + Attributes: + kind (str): + The kind of the entity. A kind matching regex ``__.*__`` is + reserved/read-only. A kind must not contain more than 1500 + bytes when UTF-8 encoded. Cannot be ``""``. + id (int): + The auto-allocated ID of the entity. + Never equal to zero. Values less than zero are + discouraged and may not be supported in the + future. 
+ name (str):
+ The name of the entity. A name matching regex ``__.*__`` is
+ reserved/read-only. A name must not be more than 1500 bytes
+ when UTF-8 encoded. Cannot be ``""``.
+ """
+
+ kind = proto.Field(proto.STRING, number=1)
+
+ id = proto.Field(proto.INT64, number=2, oneof="id_type")
+
+ name = proto.Field(proto.STRING, number=3, oneof="id_type")
+
+ partition_id = proto.Field(proto.MESSAGE, number=1, message=PartitionId,)
+
+ path = proto.RepeatedField(proto.MESSAGE, number=2, message=PathElement,)
+
+
+class RecordKey(proto.Message):
+ r"""Message for a unique key indicating a record that contains a
+ finding.
+
+ Attributes:
+ datastore_key (~.storage.DatastoreKey):
+
+ big_query_key (~.storage.BigQueryKey):
+
+ id_values (Sequence[str]):
+ Values of identifying columns in the given row. Order of
+ values matches the order of ``identifying_fields`` specified
+ in the scanning request.
+ """
+
+ datastore_key = proto.Field(
+ proto.MESSAGE, number=2, oneof="type", message=DatastoreKey,
+ )
+
+ big_query_key = proto.Field(
+ proto.MESSAGE, number=3, oneof="type", message=BigQueryKey,
+ )
+
+ id_values = proto.RepeatedField(proto.STRING, number=5)
+
+
+class BigQueryTable(proto.Message):
+ r"""Message defining the location of a BigQuery table. A table is
+ uniquely identified by its project_id, dataset_id, and table_name.
+ Within a query a table is often referenced with a string in the
+ format of: ``<project_id>:<dataset_id>.<table_id>`` or
+ ``<project_id>.<dataset_id>.<table_id>``.
+
+ Attributes:
+ project_id (str):
+ The Google Cloud Platform project ID of the
+ project containing the table. If omitted,
+ project ID is inferred from the API call.
+ dataset_id (str):
+ Dataset ID of the table.
+ table_id (str):
+ Name of the table.
+ """
+
+ project_id = proto.Field(proto.STRING, number=1)
+
+ dataset_id = proto.Field(proto.STRING, number=2)
+
+ table_id = proto.Field(proto.STRING, number=3)
+
+
+class BigQueryField(proto.Message):
+ r"""Message defining a field of a BigQuery table.
+ + Attributes: + table (~.storage.BigQueryTable): + Source table of the field. + field (~.storage.FieldId): + Designated field in the BigQuery table. + """ + + table = proto.Field(proto.MESSAGE, number=1, message=BigQueryTable,) + + field = proto.Field(proto.MESSAGE, number=2, message=FieldId,) + + +class EntityId(proto.Message): + r"""An entity in a dataset is a field or set of fields that correspond + to a single person. For example, in medical records the ``EntityId`` + might be a patient identifier, or for financial records it might be + an account identifier. This message is used when generalizations or + analysis must take into account that multiple rows correspond to the + same entity. + + Attributes: + field (~.storage.FieldId): + Composite key indicating which field contains + the entity identifier. + """ + + field = proto.Field(proto.MESSAGE, number=1, message=FieldId,) + + +class TableOptions(proto.Message): + r"""Instructions regarding the table content being inspected. + + Attributes: + identifying_fields (Sequence[~.storage.FieldId]): + The columns that are the primary keys for + table objects included in ContentItem. A copy of + this cell's value will stored alongside + alongside each finding so that the finding can + be traced to the specific row it came from. No + more than 3 may be provided. 
+ """ + + identifying_fields = proto.RepeatedField(proto.MESSAGE, number=1, message=FieldId,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 00000000..4505b485 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/noxfile.py b/noxfile.py index e27f448f..2ad172f6 100644 --- a/noxfile.py +++ b/noxfile.py @@ -27,8 +27,8 @@ BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -70,6 +70,8 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. + session.install("asyncmock", "pytest-asyncio") + session.install("mock", "pytest", "pytest-cov") session.install("-e", ".") @@ -140,7 +142,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. """ session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=73") + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") diff --git a/samples/snippets/custom_infotype.py b/samples/snippets/custom_infotype.py index 565fed69..3f63c806 100644 --- a/samples/snippets/custom_infotype.py +++ b/samples/snippets/custom_infotype.py @@ -20,8 +20,7 @@ # [START dlp_omit_name_if_also_email] def omit_name_if_also_email( - project, - content_string, + project, content_string, ): """Marches PERSON_NAME and EMAIL_ADDRESS, but not both. @@ -51,33 +50,34 @@ def omit_name_if_also_email( # the total number of findings when there is a large overlap between different # infoTypes. 
inspect_config = { - "info_types": - info_types_to_locate, - "rule_set": [{ - "info_types": [{ - "name": "PERSON_NAME" - }], - "rules": [{ - "exclusion_rule": { - "exclude_info_types": { - "info_types": [{ - "name": "EMAIL_ADDRESS" - }] - }, - "matching_type": "MATCHING_TYPE_PARTIAL_MATCH" - } - }] - }] + "info_types": info_types_to_locate, + "rule_set": [ + { + "info_types": [{"name": "PERSON_NAME"}], + "rules": [ + { + "exclusion_rule": { + "exclude_info_types": { + "info_types": [{"name": "EMAIL_ADDRESS"}] + }, + "matching_type": google.cloud.dlp_v2.MatchingType.MATCHING_TYPE_PARTIAL_MATCH, + } + } + ], + } + ], } # Construct the `item`. item = {"value": content_string} # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. - response = dlp.inspect_content(parent, inspect_config, item) + response = dlp.inspect_content( + request={"parent": parent, "inspect_config": inspect_config, "item": item} + ) return [f.info_type.name for f in response.result.findings] @@ -87,9 +87,7 @@ def omit_name_if_also_email( # [START inspect_with_person_name_w_custom_hotword] def inspect_with_person_name_w_custom_hotword( - project, - content_string, - custom_hotword="patient" + project, content_string, custom_hotword="patient" ): """Uses the Data Loss Prevention API increase likelihood for matches on PERSON_NAME if the user specified custom hotword is present. Only @@ -114,7 +112,9 @@ def inspect_with_person_name_w_custom_hotword( # window preceding the PII finding. hotword_rule = { "hotword_regex": {"pattern": custom_hotword}, - "likelihood_adjustment": {"fixed_likelihood": "VERY_LIKELY"}, + "likelihood_adjustment": { + "fixed_likelihood": google.cloud.dlp_v2.Likelihood.VERY_LIKELY + }, "proximity": {"window_before": 50}, } @@ -128,17 +128,19 @@ def inspect_with_person_name_w_custom_hotword( # Construct the configuration dictionary with the custom regex info type. 
inspect_config = { "rule_set": rule_set, - "min_likelihood": "VERY_LIKELY", + "min_likelihood": google.cloud.dlp_v2.Likelihood.VERY_LIKELY, } # Construct the `item`. item = {"value": content_string} # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. - response = dlp.inspect_content(parent, inspect_config, item) + response = dlp.inspect_content( + request={"parent": parent, "inspect_config": inspect_config, "item": item} + ) # Print out the results. if response.result.findings: @@ -153,13 +155,13 @@ def inspect_with_person_name_w_custom_hotword( else: print("No findings.") + # [END inspect_with_person_name_w_custom_hotword] # [START dlp_inspect_with_medical_record_number_custom_regex_detector] def inspect_with_medical_record_number_custom_regex_detector( - project, - content_string, + project, content_string, ): """Uses the Data Loss Prevention API to analyze string with medical record number custom regex detector @@ -183,7 +185,7 @@ def inspect_with_medical_record_number_custom_regex_detector( { "info_type": {"name": "C_MRN"}, "regex": {"pattern": "[1-9]{3}-[1-9]{1}-[1-9]{5}"}, - "likelihood": "POSSIBLE", + "likelihood": google.cloud.dlp_v2.Likelihood.POSSIBLE, } ] @@ -196,10 +198,12 @@ def inspect_with_medical_record_number_custom_regex_detector( item = {"value": content_string} # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. - response = dlp.inspect_content(parent, inspect_config, item) + response = dlp.inspect_content( + request={"parent": parent, "inspect_config": inspect_config, "item": item} + ) # Print out the results. 
if response.result.findings: @@ -214,13 +218,13 @@ def inspect_with_medical_record_number_custom_regex_detector( else: print("No findings.") + # [END dlp_inspect_with_medical_record_number_custom_regex_detector] # [START dlp_inspect_with_medical_record_number_w_custom_hotwords] def inspect_with_medical_record_number_w_custom_hotwords( - project, - content_string, + project, content_string, ): """Uses the Data Loss Prevention API to analyze string with medical record number custom regex detector, with custom hotwords rules to boost finding @@ -245,7 +249,7 @@ def inspect_with_medical_record_number_w_custom_hotwords( { "info_type": {"name": "C_MRN"}, "regex": {"pattern": "[1-9]{3}-[1-9]{1}-[1-9]{5}"}, - "likelihood": "POSSIBLE", + "likelihood": google.cloud.dlp_v2.Likelihood.POSSIBLE, } ] @@ -253,22 +257,15 @@ def inspect_with_medical_record_number_w_custom_hotwords( # boost to VERY_LIKELY when hotwords are present within the 10 character- # window preceding the PII finding. hotword_rule = { - "hotword_regex": { - "pattern": "(?i)(mrn|medical)(?-i)" - }, + "hotword_regex": {"pattern": "(?i)(mrn|medical)(?-i)"}, "likelihood_adjustment": { - "fixed_likelihood": "VERY_LIKELY" + "fixed_likelihood": google.cloud.dlp_v2.Likelihood.VERY_LIKELY }, - "proximity": { - "window_before": 10 - } + "proximity": {"window_before": 10}, } rule_set = [ - { - "info_types": [{"name": "C_MRN"}], - "rules": [{"hotword_rule": hotword_rule}], - } + {"info_types": [{"name": "C_MRN"}], "rules": [{"hotword_rule": hotword_rule}]} ] # Construct the configuration dictionary with the custom regex info type. @@ -281,10 +278,12 @@ def inspect_with_medical_record_number_w_custom_hotwords( item = {"value": content_string} # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. 
- response = dlp.inspect_content(parent, inspect_config, item) + response = dlp.inspect_content( + request={"parent": parent, "inspect_config": inspect_config, "item": item} + ) # Print out the results. if response.result.findings: @@ -299,4 +298,5 @@ def inspect_with_medical_record_number_w_custom_hotwords( else: print("No findings.") + # [END dlp_inspect_with_medical_record_number_w_custom_hotwords] diff --git a/samples/snippets/custom_infotype_test.py b/samples/snippets/custom_infotype_test.py index 4a81df60..d1fb3326 100644 --- a/samples/snippets/custom_infotype_test.py +++ b/samples/snippets/custom_infotype_test.py @@ -21,7 +21,8 @@ def test_omit_name_if_also_email(capsys): info_types = custom_infotype.omit_name_if_also_email( - GCLOUD_PROJECT, "alice@example.com") + GCLOUD_PROJECT, "alice@example.com" + ) # Ensure we found only EMAIL_ADDRESS, and not PERSON_NAME. assert len(info_types) == 1 @@ -30,7 +31,8 @@ def test_omit_name_if_also_email(capsys): def test_inspect_with_person_name_w_custom_hotword(capsys): custom_infotype.inspect_with_person_name_w_custom_hotword( - GCLOUD_PROJECT, "patient's name is John Doe.", "patient") + GCLOUD_PROJECT, "patient's name is John Doe.", "patient" + ) out, _ = capsys.readouterr() assert "Info type: PERSON_NAME" in out @@ -39,26 +41,27 @@ def test_inspect_with_person_name_w_custom_hotword(capsys): def test_inspect_with_medical_record_number_custom_regex_detector(capsys): custom_infotype.inspect_with_medical_record_number_custom_regex_detector( - GCLOUD_PROJECT, "Patients MRN 444-5-22222") + GCLOUD_PROJECT, "Patients MRN 444-5-22222" + ) out, _ = capsys.readouterr() assert "Info type: C_MRN" in out -def test_inspect_with_medical_record_number_w_custom_hotwords_no_hotwords( - capsys): +def test_inspect_with_medical_record_number_w_custom_hotwords_no_hotwords(capsys): custom_infotype.inspect_with_medical_record_number_w_custom_hotwords( - GCLOUD_PROJECT, "just a number 444-5-22222") + GCLOUD_PROJECT, "just a number 444-5-22222" 
+ ) out, _ = capsys.readouterr() assert "Info type: C_MRN" in out assert "Likelihood: 3" in out -def test_inspect_with_medical_record_number_w_custom_hotwords_has_hotwords( - capsys): +def test_inspect_with_medical_record_number_w_custom_hotwords_has_hotwords(capsys): custom_infotype.inspect_with_medical_record_number_w_custom_hotwords( - GCLOUD_PROJECT, "Patients MRN 444-5-22222") + GCLOUD_PROJECT, "Patients MRN 444-5-22222" + ) out, _ = capsys.readouterr() assert "Info type: C_MRN" in out diff --git a/samples/snippets/deid.py b/samples/snippets/deid.py index 70bd1623..89b8168f 100644 --- a/samples/snippets/deid.py +++ b/samples/snippets/deid.py @@ -43,12 +43,10 @@ def deidentify_with_mask( dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Construct inspect configuration dictionary - inspect_config = { - "info_types": [{"name": info_type} for info_type in info_types] - } + inspect_config = {"info_types": [{"name": info_type} for info_type in info_types]} # Construct deidentify configuration dictionary deidentify_config = { @@ -71,10 +69,12 @@ def deidentify_with_mask( # Call the API response = dlp.deidentify_content( - parent, - inspect_config=inspect_config, - deidentify_config=deidentify_config, - item=item, + request={ + "parent": parent, + "deidentify_config": deidentify_config, + "inspect_config": inspect_config, + "item": item, + } ) # Print out the results. @@ -85,9 +85,7 @@ def deidentify_with_mask( # [START dlp_deidentify_redact] def deidentify_with_redact( - project, - input_str, - info_types, + project, input_str, info_types, ): """Uses the Data Loss Prevention API to deidentify sensitive data in a string by redacting matched input values. @@ -104,23 +102,15 @@ def deidentify_with_redact( dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. 
- parent = dlp.project_path(project) + parent = f"projects/{project}" # Construct inspect configuration dictionary - inspect_config = { - "info_types": [{"name": info_type} for info_type in info_types] - } + inspect_config = {"info_types": [{"name": info_type} for info_type in info_types]} # Construct deidentify configuration dictionary deidentify_config = { "info_type_transformations": { - "transformations": [ - { - "primitive_transformation": { - "redact_config": {} - } - } - ] + "transformations": [{"primitive_transformation": {"redact_config": {}}}] } } @@ -129,10 +119,12 @@ def deidentify_with_redact( # Call the API response = dlp.deidentify_content( - parent, - inspect_config=inspect_config, - deidentify_config=deidentify_config, - item=item, + request={ + "parent": parent, + "deidentify_config": deidentify_config, + "inspect_config": inspect_config, + "item": item, + } ) # Print out the results. @@ -143,10 +135,7 @@ def deidentify_with_redact( # [START dlp_deidentify_replace] def deidentify_with_replace( - project, - input_str, - info_types, - replacement_str="REPLACEMENT_STR", + project, input_str, info_types, replacement_str="REPLACEMENT_STR", ): """Uses the Data Loss Prevention API to deidentify sensitive data in a string by replacing matched input values with a value you specify. @@ -165,12 +154,10 @@ def deidentify_with_replace( dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. 
- parent = dlp.project_path(project) + parent = f"projects/{project}" # Construct inspect configuration dictionary - inspect_config = { - "info_types": [{"name": info_type} for info_type in info_types] - } + inspect_config = {"info_types": [{"name": info_type} for info_type in info_types]} # Construct deidentify configuration dictionary deidentify_config = { @@ -179,9 +166,7 @@ def deidentify_with_replace( { "primitive_transformation": { "replace_config": { - "new_value": { - "string_value": replacement_str, - } + "new_value": {"string_value": replacement_str} } } } @@ -194,15 +179,18 @@ def deidentify_with_replace( # Call the API response = dlp.deidentify_content( - parent, - inspect_config=inspect_config, - deidentify_config=deidentify_config, - item=item, + request={ + "parent": parent, + "deidentify_config": deidentify_config, + "inspect_config": inspect_config, + "item": item, + } ) # Print out the results. print(response.item.value) + # [END dlp_deidentify_replace] # [START dlp_deidentify_fpe] @@ -245,7 +233,7 @@ def deidentify_with_fpe( dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # The wrapped key is base64-encoded, but the library expects a binary # string, so decode it here. 
@@ -256,24 +244,17 @@ def deidentify_with_fpe( # Construct FPE configuration dictionary crypto_replace_ffx_fpe_config = { "crypto_key": { - "kms_wrapped": { - "wrapped_key": wrapped_key, - "crypto_key_name": key_name, - } + "kms_wrapped": {"wrapped_key": wrapped_key, "crypto_key_name": key_name} }, "common_alphabet": alphabet, } # Add surrogate type if surrogate_type: - crypto_replace_ffx_fpe_config["surrogate_info_type"] = { - "name": surrogate_type - } + crypto_replace_ffx_fpe_config["surrogate_info_type"] = {"name": surrogate_type} # Construct inspect configuration dictionary - inspect_config = { - "info_types": [{"name": info_type} for info_type in info_types] - } + inspect_config = {"info_types": [{"name": info_type} for info_type in info_types]} # Construct deidentify configuration dictionary deidentify_config = { @@ -293,10 +274,12 @@ def deidentify_with_fpe( # Call the API response = dlp.deidentify_content( - parent, - inspect_config=inspect_config, - deidentify_config=deidentify_config, - item=item, + request={ + "parent": parent, + "deidentify_config": deidentify_config, + "inspect_config": inspect_config, + "item": item, + } ) # Print results @@ -341,7 +324,7 @@ def reidentify_with_fpe( dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # The wrapped key is base64-encoded, but the library expects a binary # string, so decode it here. @@ -382,10 +365,12 @@ def reidentify_with_fpe( # Call the API response = dlp.reidentify_content( - parent, - inspect_config=inspect_config, - reidentify_config=reidentify_config, - item=item, + request={ + "parent": parent, + "reidentify_config": reidentify_config, + "inspect_config": inspect_config, + "item": item, + } ) # Print results @@ -428,7 +413,7 @@ def deidentify_free_text_with_fpe_using_surrogate( dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. 
- parent = dlp.project_path(project) + parent = f"projects/{project}" # The unwrapped key is base64-encoded, but the library expects a binary # string, so decode it here. @@ -441,26 +426,22 @@ def deidentify_free_text_with_fpe_using_surrogate( "info_types": [{"name": info_type}], "primitive_transformation": { "crypto_replace_ffx_fpe_config": { - "crypto_key": { - "unwrapped": {"key": unwrapped_key} - }, + "crypto_key": {"unwrapped": {"key": unwrapped_key}}, "common_alphabet": alphabet, "surrogate_info_type": {"name": surrogate_type}, } - } + }, } deidentify_config = { - "info_type_transformations": { - "transformations": [transformation] - } + "info_type_transformations": {"transformations": [transformation]} } # Construct the inspect config, trying to finding all PII with likelihood # higher than UNLIKELY inspect_config = { "info_types": [{"name": info_type}], - "min_likelihood": "UNLIKELY" + "min_likelihood": google.cloud.dlp_v2.Likelihood.UNLIKELY, } # Convert string to item @@ -468,10 +449,12 @@ def deidentify_free_text_with_fpe_using_surrogate( # Call the API response = dlp.deidentify_content( - parent, - inspect_config=inspect_config, - deidentify_config=deidentify_config, - item=item, + request={ + "parent": parent, + "deidentify_config": deidentify_config, + "inspect_config": inspect_config, + "item": item, + } ) # Print results @@ -511,7 +494,7 @@ def reidentify_free_text_with_fpe_using_surrogate( dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # The unwrapped key is base64-encoded, but the library expects a binary # string, so decode it here. 
@@ -523,9 +506,7 @@ def reidentify_free_text_with_fpe_using_surrogate( transformation = { "primitive_transformation": { "crypto_replace_ffx_fpe_config": { - "crypto_key": { - "unwrapped": {"key": unwrapped_key} - }, + "crypto_key": {"unwrapped": {"key": unwrapped_key}}, "common_alphabet": alphabet, "surrogate_info_type": {"name": surrogate_type}, } @@ -533,9 +514,7 @@ def reidentify_free_text_with_fpe_using_surrogate( } reidentify_config = { - "info_type_transformations": { - "transformations": [transformation] - } + "info_type_transformations": {"transformations": [transformation]} } inspect_config = { @@ -549,10 +528,12 @@ def reidentify_free_text_with_fpe_using_surrogate( # Call the API response = dlp.reidentify_content( - parent, - inspect_config=inspect_config, - reidentify_config=reidentify_config, - item=item, + request={ + "parent": parent, + "reidentify_config": reidentify_config, + "inspect_config": inspect_config, + "item": item, + } ) # Print results @@ -608,7 +589,7 @@ def deidentify_with_date_shift( dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. 
- parent = dlp.project_path(project) + parent = f"projects/{project}" # Convert date field list to Protobuf type def map_fields(field): @@ -637,11 +618,7 @@ def map_data(value): try: date = datetime.strptime(value, "%m/%d/%Y") return { - "date_value": { - "year": date.year, - "month": date.month, - "day": date.day, - } + "date_value": {"year": date.year, "month": date.month, "day": date.day} } except ValueError: return {"string_value": value} @@ -709,7 +686,11 @@ def write_data(data): # Call the API response = dlp.deidentify_content( - parent, deidentify_config=deidentify_config, item=table_item + request={ + "parent": parent, + "deidentify_config": deidentify_config, + "item": table_item, + } ) # Write results to CSV file @@ -745,32 +726,28 @@ def deidentify_with_replace_infotype(project, item, info_types): dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Construct inspect configuration dictionary - inspect_config = { - "info_types": [{"name": info_type} for info_type in info_types] - } + inspect_config = {"info_types": [{"name": info_type} for info_type in info_types]} # Construct deidentify configuration dictionary deidentify_config = { "info_type_transformations": { "transformations": [ - { - "primitive_transformation": { - "replace_with_info_type_config": {} - } - } + {"primitive_transformation": {"replace_with_info_type_config": {}}} ] } } # Call the API response = dlp.deidentify_content( - parent, - inspect_config=inspect_config, - deidentify_config=deidentify_config, - item={"value": item}, + request={ + "parent": parent, + "deidentify_config": deidentify_config, + "inspect_config": inspect_config, + "item": {"value": item}, + } ) # Print out the results. 
@@ -789,8 +766,7 @@ def deidentify_with_replace_infotype(project, item, info_types): mask_parser = subparsers.add_parser( "deid_mask", - help="Deidentify sensitive data in a string by masking it with a " - "character.", + help="Deidentify sensitive data in a string by masking it with a " "character.", ) mask_parser.add_argument( "--info_types", @@ -802,8 +778,7 @@ def deidentify_with_replace_infotype(project, item, info_types): default=["FIRST_NAME", "LAST_NAME", "EMAIL_ADDRESS"], ) mask_parser.add_argument( - "project", - help="The Google Cloud project id to use as a parent resource.", + "project", help="The Google Cloud project id to use as a parent resource.", ) mask_parser.add_argument("item", help="The string to deidentify.") mask_parser.add_argument( @@ -836,12 +811,12 @@ def deidentify_with_replace_infotype(project, item, info_types): default=["FIRST_NAME", "LAST_NAME", "EMAIL_ADDRESS"], ) replace_parser.add_argument( - "project", - help="The Google Cloud project id to use as a parent resource.", + "project", help="The Google Cloud project id to use as a parent resource.", ) replace_parser.add_argument("item", help="The string to deidentify.") - replace_parser.add_argument("replacement_str", help="The string to " - "replace all matched values with.") + replace_parser.add_argument( + "replacement_str", help="The string to " "replace all matched values with." + ) fpe_parser = subparsers.add_parser( "deid_fpe", @@ -858,13 +833,11 @@ def deidentify_with_replace_infotype(project, item, info_types): default=["FIRST_NAME", "LAST_NAME", "EMAIL_ADDRESS"], ) fpe_parser.add_argument( - "project", - help="The Google Cloud project id to use as a parent resource.", + "project", help="The Google Cloud project id to use as a parent resource.", ) fpe_parser.add_argument( "item", - help="The string to deidentify. " - "Example: string = 'My SSN is 372819127'", + help="The string to deidentify. 
" "Example: string = 'My SSN is 372819127'", ) fpe_parser.add_argument( "key_name", @@ -902,13 +875,11 @@ def deidentify_with_replace_infotype(project, item, info_types): "Encryption (FPE).", ) reid_parser.add_argument( - "project", - help="The Google Cloud project id to use as a parent resource.", + "project", help="The Google Cloud project id to use as a parent resource.", ) reid_parser.add_argument( "item", - help="The string to deidentify. " - "Example: string = 'My SSN is 372819127'", + help="The string to deidentify. " "Example: string = 'My SSN is 372819127'", ) reid_parser.add_argument( "surrogate_type", @@ -944,8 +915,7 @@ def deidentify_with_replace_infotype(project, item, info_types): help="Deidentify dates in a CSV file by pseudorandomly shifting them.", ) date_shift_parser.add_argument( - "project", - help="The Google Cloud project id to use as a parent resource.", + "project", help="The Google Cloud project id to use as a parent resource.", ) date_shift_parser.add_argument( "input_csv_file", @@ -996,7 +966,7 @@ def deidentify_with_replace_infotype(project, item, info_types): replace_with_infotype_parser = subparsers.add_parser( "replace_with_infotype", help="Deidentify sensitive data in a string by replacing it with the " - "info type of the data." 
+ "info type of the data.", ) replace_with_infotype_parser.add_argument( "--info_types", @@ -1008,8 +978,7 @@ def deidentify_with_replace_infotype(project, item, info_types): default=["FIRST_NAME", "LAST_NAME", "EMAIL_ADDRESS"], ) replace_with_infotype_parser.add_argument( - "project", - help="The Google Cloud project id to use as a parent resource.", + "project", help="The Google Cloud project id to use as a parent resource.", ) replace_with_infotype_parser.add_argument( "item", @@ -1067,7 +1036,5 @@ def deidentify_with_replace_infotype(project, item, info_types): ) elif args.content == "replace_with_infotype": deidentify_with_replace_infotype( - args.project, - item=args.item, - info_types=args.info_types, + args.project, item=args.item, info_types=args.info_types, ) diff --git a/samples/snippets/deid_test.py b/samples/snippets/deid_test.py index 7d886c51..1863f754 100644 --- a/samples/snippets/deid_test.py +++ b/samples/snippets/deid_test.py @@ -16,6 +16,8 @@ import shutil import tempfile +import google.cloud.dlp_v2 + import pytest import deid @@ -79,10 +81,7 @@ def test_deidentify_with_mask_masking_character_specified(capsys): def test_deidentify_with_mask_masking_number_specified(capsys): deid.deidentify_with_mask( - GCLOUD_PROJECT, - HARMFUL_STRING, - ["US_SOCIAL_SECURITY_NUMBER"], - number_to_mask=7, + GCLOUD_PROJECT, HARMFUL_STRING, ["US_SOCIAL_SECURITY_NUMBER"], number_to_mask=7, ) out, _ = capsys.readouterr() @@ -99,8 +98,10 @@ def test_deidentify_with_redact(capsys): def test_deidentify_with_replace(capsys): deid.deidentify_with_replace( - GCLOUD_PROJECT, HARMFUL_STRING, ["US_SOCIAL_SECURITY_NUMBER"], - replacement_str="REPLACEMENT_STR" + GCLOUD_PROJECT, + HARMFUL_STRING, + ["US_SOCIAL_SECURITY_NUMBER"], + replacement_str="REPLACEMENT_STR", ) out, _ = capsys.readouterr() @@ -112,7 +113,7 @@ def test_deidentify_with_fpe(capsys): GCLOUD_PROJECT, HARMFUL_STRING, ["US_SOCIAL_SECURITY_NUMBER"], - alphabet="NUMERIC", + 
alphabet=google.cloud.dlp_v2.CharsToIgnore.CommonCharsToIgnore.NUMERIC, wrapped_key=WRAPPED_KEY, key_name=KEY_NAME, ) @@ -127,7 +128,7 @@ def test_deidentify_with_fpe_uses_surrogate_info_types(capsys): GCLOUD_PROJECT, HARMFUL_STRING, ["US_SOCIAL_SECURITY_NUMBER"], - alphabet="NUMERIC", + alphabet=google.cloud.dlp_v2.CharsToIgnore.CommonCharsToIgnore.NUMERIC, wrapped_key=WRAPPED_KEY, key_name=KEY_NAME, surrogate_type=SURROGATE_TYPE, @@ -143,7 +144,7 @@ def test_deidentify_with_fpe_ignores_insensitive_data(capsys): GCLOUD_PROJECT, HARMLESS_STRING, ["US_SOCIAL_SECURITY_NUMBER"], - alphabet="NUMERIC", + alphabet=google.cloud.dlp_v2.CharsToIgnore.CommonCharsToIgnore.NUMERIC, wrapped_key=WRAPPED_KEY, key_name=KEY_NAME, ) @@ -198,7 +199,7 @@ def test_reidentify_with_fpe(capsys): surrogate_type=SURROGATE_TYPE, wrapped_key=WRAPPED_KEY, key_name=KEY_NAME, - alphabet="NUMERIC", + alphabet=google.cloud.dlp_v2.CharsToIgnore.CommonCharsToIgnore.NUMERIC, ) out, _ = capsys.readouterr() @@ -215,7 +216,7 @@ def test_deidentify_free_text_with_fpe_using_surrogate(capsys): info_type="PHONE_NUMBER", surrogate_type="PHONE_TOKEN", unwrapped_key=UNWRAPPED_KEY, - alphabet="NUMERIC", + alphabet=google.cloud.dlp_v2.CharsToIgnore.CommonCharsToIgnore.NUMERIC, ) out, _ = capsys.readouterr() @@ -233,7 +234,7 @@ def test_reidentify_free_text_with_fpe_using_surrogate(capsys): labeled_fpe_string, surrogate_type="PHONE_TOKEN", unwrapped_key=UNWRAPPED_KEY, - alphabet="NUMERIC", + alphabet=google.cloud.dlp_v2.CharsToIgnore.CommonCharsToIgnore.NUMERIC, ) out, _ = capsys.readouterr() @@ -246,9 +247,7 @@ def test_reidentify_free_text_with_fpe_using_surrogate(capsys): def test_deidentify_with_replace_infotype(capsys): url_to_redact = "https://cloud.google.com" deid.deidentify_with_replace_infotype( - GCLOUD_PROJECT, - "My favorite site is " + url_to_redact, - ["URL"], + GCLOUD_PROJECT, "My favorite site is " + url_to_redact, ["URL"], ) out, _ = capsys.readouterr() diff --git 
a/samples/snippets/inspect_content.py b/samples/snippets/inspect_content.py index fb2573e4..bceb2981 100644 --- a/samples/snippets/inspect_content.py +++ b/samples/snippets/inspect_content.py @@ -24,9 +24,7 @@ # [START dlp_inspect_string_basic] def inspect_string_basic( - project, - content_string, - info_types=["PHONE_NUMBER"], + project, content_string, info_types=["PHONE_NUMBER"], ): """Uses the Data Loss Prevention API to analyze strings for protected data. Args: @@ -58,10 +56,12 @@ def inspect_string_basic( item = {"value": content_string} # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. - response = dlp.inspect_content(parent, inspect_config, item) + response = dlp.inspect_content( + request={"parent": parent, "inspect_config": inspect_config, "item": item} + ) # Print out the results. if response.result.findings: @@ -149,10 +149,12 @@ def inspect_string( item = {"value": content_string} # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. - response = dlp.inspect_content(parent, inspect_config, item) + response = dlp.inspect_content( + request={"parent": parent, "inspect_config": inspect_config, "item": item} + ) # Print out the results. if response.result.findings: @@ -274,19 +276,19 @@ def inspect_table( headers = [{"name": val} for val in data["header"]] rows = [] for row in data["rows"]: - rows.append( - {"values": [{"string_value": cell_val} for cell_val in row]} - ) + rows.append({"values": [{"string_value": cell_val} for cell_val in row]}) table = {} table["headers"] = headers table["rows"] = rows item = {"table": table} # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. 
- response = dlp.inspect_content(parent, inspect_config, item) + response = dlp.inspect_content( + request={"parent": parent, "inspect_config": inspect_config, "item": item} + ) # Print out the results. if response.result.findings: @@ -402,10 +404,12 @@ def inspect_file( item = {"byte_item": {"type": content_type_index, "data": f.read()}} # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. - response = dlp.inspect_content(parent, inspect_config, item) + response = dlp.inspect_content( + request={"parent": parent, "inspect_config": inspect_config, "item": item} + ) # Print out the results. if response.result.findings: @@ -515,7 +519,7 @@ def inspect_gcs_file( # Convert the project id into full resource ids. topic = google.cloud.pubsub.PublisherClient.topic_path(project, topic_id) - parent = dlp.location_path(project, 'global') + parent = f"projects/{project}/locations/global" # Tell the API where to send a notification when the job is complete. actions = [{"pub_sub": {"topic": topic}}] @@ -527,7 +531,9 @@ def inspect_gcs_file( "actions": actions, } - operation = dlp.create_dlp_job(parent, inspect_job=inspect_job) + operation = dlp.create_dlp_job( + request={"parent": parent, "inspect_job": inspect_job} + ) print("Inspection operation started: {}".format(operation.name)) # Create a Pub/Sub client and find the subscription. The subscription is @@ -546,7 +552,7 @@ def callback(message): message.ack() # Now that the job is done, fetch the results and print them. - job = dlp.get_dlp_job(operation.name) + job = dlp.get_dlp_job(request={"name": operation.name}) if job.inspect_details.result.info_type_stats: for finding in job.inspect_details.result.info_type_stats: print( @@ -680,7 +686,7 @@ def inspect_datastore( # Convert the project id into full resource ids. 
topic = google.cloud.pubsub.PublisherClient.topic_path(project, topic_id) - parent = dlp.location_path(project, 'global') + parent = f"projects/{project}/locations/global" # Tell the API where to send a notification when the job is complete. actions = [{"pub_sub": {"topic": topic}}] @@ -692,7 +698,9 @@ def inspect_datastore( "actions": actions, } - operation = dlp.create_dlp_job(parent, inspect_job=inspect_job) + operation = dlp.create_dlp_job( + request={"parent": parent, "inspect_job": inspect_job} + ) print("Inspection operation started: {}".format(operation.name)) # Create a Pub/Sub client and find the subscription. The subscription is @@ -711,7 +719,7 @@ def callback(message): message.ack() # Now that the job is done, fetch the results and print them. - job = dlp.get_dlp_job(operation.name) + job = dlp.get_dlp_job(request={"name": operation.name}) if job.inspect_details.result.info_type_stats: for finding in job.inspect_details.result.info_type_stats: print( @@ -848,7 +856,7 @@ def inspect_bigquery( # Convert the project id into full resource ids. topic = google.cloud.pubsub.PublisherClient.topic_path(project, topic_id) - parent = dlp.location_path(project, 'global') + parent = f"projects/{project}/locations/global" # Tell the API where to send a notification when the job is complete. actions = [{"pub_sub": {"topic": topic}}] @@ -860,7 +868,9 @@ def inspect_bigquery( "actions": actions, } - operation = dlp.create_dlp_job(parent, inspect_job=inspect_job) + operation = dlp.create_dlp_job( + request={"parent": parent, "inspect_job": inspect_job} + ) print("Inspection operation started: {}".format(operation.name)) # Create a Pub/Sub client and find the subscription. The subscription is @@ -879,7 +889,7 @@ def callback(message): message.ack() # Now that the job is done, fetch the results and print them. 
- job = dlp.get_dlp_job(operation.name) + job = dlp.get_dlp_job(request={"name": operation.name}) if job.inspect_details.result.info_type_stats: for finding in job.inspect_details.result.info_type_stats: print( @@ -1040,9 +1050,7 @@ def callback(message): ) parser_file = subparsers.add_parser("file", help="Inspect a local file.") - parser_file.add_argument( - "filename", help="The path to the file to inspect." - ) + parser_file.add_argument("filename", help="The path to the file to inspect.") parser_file.add_argument( "--project", help="The Google Cloud project id to use as a parent resource.", @@ -1189,8 +1197,7 @@ def callback(message): help="The Google Cloud project id of the target Datastore.", ) parser_datastore.add_argument( - "kind", - help='The kind of the Datastore entity to inspect, e.g. "Person".', + "kind", help='The kind of the Datastore entity to inspect, e.g. "Person".', ) parser_datastore.add_argument( "topic_id", @@ -1266,8 +1273,7 @@ def callback(message): "bigquery", help="Inspect files on Google BigQuery." ) parser_bigquery.add_argument( - "bigquery_project", - help="The Google Cloud project id of the target table.", + "bigquery_project", help="The Google Cloud project id of the target table.", ) parser_bigquery.add_argument( "dataset_id", help="The ID of the target BigQuery dataset." diff --git a/samples/snippets/inspect_content_test.py b/samples/snippets/inspect_content_test.py index bdabda26..8cb5590b 100644 --- a/samples/snippets/inspect_content_test.py +++ b/samples/snippets/inspect_content_test.py @@ -96,8 +96,7 @@ def subscription_id(topic_id): # Subscribes to a topic. 
subscriber = google.cloud.pubsub.SubscriberClient() topic_path = subscriber.topic_path(GCLOUD_PROJECT, topic_id) - subscription_path = subscriber.subscription_path( - GCLOUD_PROJECT, SUBSCRIPTION_ID) + subscription_path = subscriber.subscription_path(GCLOUD_PROJECT, SUBSCRIPTION_ID) try: subscriber.create_subscription(subscription_path, topic_path) except google.api_core.exceptions.AlreadyExists: @@ -304,10 +303,9 @@ def test_inspect_image_file(capsys): def cancel_operation(out): if "Inspection operation started" in out: # Cancel the operation - operation_id = out.split( - "Inspection operation started: ")[1].split("\n")[0] + operation_id = out.split("Inspection operation started: ")[1].split("\n")[0] client = google.cloud.dlp_v2.DlpServiceClient() - client.cancel_dlp_job(operation_id) + client.cancel_dlp_job(request={"name": operation_id}) @pytest.mark.flaky(max_runs=2, min_passes=1) @@ -320,7 +318,7 @@ def test_inspect_gcs_file(bucket, topic_id, subscription_id, capsys): topic_id, subscription_id, ["EMAIL_ADDRESS", "PHONE_NUMBER"], - timeout=TIMEOUT + timeout=TIMEOUT, ) out, _ = capsys.readouterr() @@ -331,7 +329,8 @@ def test_inspect_gcs_file(bucket, topic_id, subscription_id, capsys): @pytest.mark.flaky(max_runs=2, min_passes=1) def test_inspect_gcs_file_with_custom_info_types( - bucket, topic_id, subscription_id, capsys): + bucket, topic_id, subscription_id, capsys +): try: dictionaries = ["gary@somedomain.com"] regexes = ["\\(\\d{3}\\) \\d{3}-\\d{4}"] @@ -345,7 +344,8 @@ def test_inspect_gcs_file_with_custom_info_types( [], custom_dictionaries=dictionaries, custom_regexes=regexes, - timeout=TIMEOUT) + timeout=TIMEOUT, + ) out, _ = capsys.readouterr() @@ -355,8 +355,7 @@ def test_inspect_gcs_file_with_custom_info_types( @pytest.mark.flaky(max_runs=2, min_passes=1) -def test_inspect_gcs_file_no_results( - bucket, topic_id, subscription_id, capsys): +def test_inspect_gcs_file_no_results(bucket, topic_id, subscription_id, capsys): try: 
inspect_content.inspect_gcs_file( GCLOUD_PROJECT, @@ -365,7 +364,8 @@ def test_inspect_gcs_file_no_results( topic_id, subscription_id, ["EMAIL_ADDRESS", "PHONE_NUMBER"], - timeout=TIMEOUT) + timeout=TIMEOUT, + ) out, _ = capsys.readouterr() @@ -384,7 +384,8 @@ def test_inspect_gcs_image_file(bucket, topic_id, subscription_id, capsys): topic_id, subscription_id, ["EMAIL_ADDRESS", "PHONE_NUMBER"], - timeout=TIMEOUT) + timeout=TIMEOUT, + ) out, _ = capsys.readouterr() assert "Info type: EMAIL_ADDRESS" in out @@ -402,7 +403,8 @@ def test_inspect_gcs_multiple_files(bucket, topic_id, subscription_id, capsys): topic_id, subscription_id, ["EMAIL_ADDRESS", "PHONE_NUMBER"], - timeout=TIMEOUT) + timeout=TIMEOUT, + ) out, _ = capsys.readouterr() @@ -412,8 +414,7 @@ def test_inspect_gcs_multiple_files(bucket, topic_id, subscription_id, capsys): @pytest.mark.flaky(max_runs=2, min_passes=1) -def test_inspect_datastore( - datastore_project, topic_id, subscription_id, capsys): +def test_inspect_datastore(datastore_project, topic_id, subscription_id, capsys): try: inspect_content.inspect_datastore( GCLOUD_PROJECT, @@ -422,7 +423,8 @@ def test_inspect_datastore( topic_id, subscription_id, ["FIRST_NAME", "EMAIL_ADDRESS", "PHONE_NUMBER"], - timeout=TIMEOUT) + timeout=TIMEOUT, + ) out, _ = capsys.readouterr() assert "Info type: EMAIL_ADDRESS" in out @@ -432,7 +434,8 @@ def test_inspect_datastore( @pytest.mark.flaky(max_runs=2, min_passes=1) def test_inspect_datastore_no_results( - datastore_project, topic_id, subscription_id, capsys): + datastore_project, topic_id, subscription_id, capsys +): try: inspect_content.inspect_datastore( GCLOUD_PROJECT, @@ -441,7 +444,8 @@ def test_inspect_datastore_no_results( topic_id, subscription_id, ["PHONE_NUMBER"], - timeout=TIMEOUT) + timeout=TIMEOUT, + ) out, _ = capsys.readouterr() assert "No findings" in out @@ -459,7 +463,8 @@ def test_inspect_bigquery(bigquery_project, topic_id, subscription_id, capsys): topic_id, subscription_id, ["FIRST_NAME", 
"EMAIL_ADDRESS", "PHONE_NUMBER"], - timeout=1) + timeout=1, + ) out, _ = capsys.readouterr() assert "Inspection operation started" in out diff --git a/samples/snippets/jobs.py b/samples/snippets/jobs.py index a8ac0b43..971aa7f6 100644 --- a/samples/snippets/jobs.py +++ b/samples/snippets/jobs.py @@ -61,25 +61,26 @@ def list_dlp_jobs(project, filter_string=None, job_type=None): dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Job type dictionary job_type_to_int = { - "DLP_JOB_TYPE_UNSPECIFIED": - google.cloud.dlp.enums.DlpJobType.DLP_JOB_TYPE_UNSPECIFIED, - "INSPECT_JOB": google.cloud.dlp.enums.DlpJobType.INSPECT_JOB, - "RISK_ANALYSIS_JOB": google.cloud.dlp.enums.DlpJobType.RISK_ANALYSIS_JOB, + "DLP_JOB_TYPE_UNSPECIFIED": google.cloud.dlp.DlpJobType.DLP_JOB_TYPE_UNSPECIFIED, + "INSPECT_JOB": google.cloud.dlp.DlpJobType.INSPECT_JOB, + "RISK_ANALYSIS_JOB": google.cloud.dlp.DlpJobType.RISK_ANALYSIS_JOB, } # If job type is specified, convert job type to number through enums. if job_type: job_type = job_type_to_int[job_type] # Call the API to get a list of jobs. - response = dlp.list_dlp_jobs(parent, filter_=filter_string, type_=job_type) + response = dlp.list_dlp_jobs( + request={"parent": parent, "filter": filter_string, "type": job_type} + ) # Iterate over results. for job in response: - print("Job: %s; status: %s" % (job.name, job.JobState.Name(job.state))) + print("Job: %s; status: %s" % (job.name, job.state.name)) # [END dlp_list_jobs] @@ -103,10 +104,10 @@ def delete_dlp_job(project, job_name): dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id and job name into a full resource id. - name = dlp.dlp_job_path(project, job_name) + name = f"projects/{project}/dlpJobs/{job_name}" # Call the API to delete job. 
- dlp.delete_dlp_job(name) + dlp.delete_dlp_job(request={"name": name}) print("Successfully deleted %s" % job_name) @@ -123,8 +124,7 @@ def delete_dlp_job(project, job_name): list_parser = subparsers.add_parser( "list", - help="List Data Loss Prevention API jobs corresponding to a given " - "filter.", + help="List Data Loss Prevention API jobs corresponding to a given " "filter.", ) list_parser.add_argument( "project", help="The project id to use as a parent resource." @@ -137,11 +137,7 @@ def delete_dlp_job(project, job_name): list_parser.add_argument( "-t", "--type", - choices=[ - "DLP_JOB_TYPE_UNSPECIFIED", - "INSPECT_JOB", - "RISK_ANALYSIS_JOB", - ], + choices=["DLP_JOB_TYPE_UNSPECIFIED", "INSPECT_JOB", "RISK_ANALYSIS_JOB"], help='The type of job. API defaults to "INSPECT"', ) @@ -153,15 +149,12 @@ def delete_dlp_job(project, job_name): ) delete_parser.add_argument( "job_name", - help="The name of the DlpJob resource to be deleted. " - "Example: X-#####", + help="The name of the DlpJob resource to be deleted. 
" "Example: X-#####", ) args = parser.parse_args() if args.content == "list": - list_dlp_jobs( - args.project, filter_string=args.filter, job_type=args.type - ) + list_dlp_jobs(args.project, filter_string=args.filter, job_type=args.type) elif args.content == "delete": delete_dlp_job(args.project, args.job_name) diff --git a/samples/snippets/jobs_test.py b/samples/snippets/jobs_test.py index 89997bc5..361118d4 100644 --- a/samples/snippets/jobs_test.py +++ b/samples/snippets/jobs_test.py @@ -33,7 +33,7 @@ def test_job_name(): dlp = google.cloud.dlp_v2.DlpServiceClient() - parent = dlp.project_path(GCLOUD_PROJECT) + parent = f"projects/{GCLOUD_PROJECT}" # Construct job request risk_job = { @@ -47,15 +47,17 @@ def test_job_name(): }, } - response = dlp.create_dlp_job(parent, risk_job=risk_job, job_id=test_job_id) + response = dlp.create_dlp_job( + request={"parent": parent, "risk_job": risk_job, "job_id": test_job_id} + ) full_path = response.name # API expects only job name, not full project path - job_name = full_path[full_path.rfind("/") + 1:] + job_name = full_path[full_path.rfind("/") + 1 :] yield job_name # clean up job if not deleted try: - dlp.delete_dlp_job(full_path) + dlp.delete_dlp_job(request={"name": full_path}) except google.api_core.exceptions.NotFound: print("Issue during teardown, missing job") diff --git a/samples/snippets/metadata.py b/samples/snippets/metadata.py index 7a65941d..0a3b4380 100644 --- a/samples/snippets/metadata.py +++ b/samples/snippets/metadata.py @@ -38,7 +38,9 @@ def list_info_types(language_code=None, result_filter=None): dlp = google.cloud.dlp_v2.DlpServiceClient() # Make the API call. - response = dlp.list_info_types(language_code, result_filter) + response = dlp.list_info_types( + request={"parent": language_code, "filter": result_filter} + ) # Print the results to the console. 
print("Info types:") @@ -56,8 +58,7 @@ def list_info_types(language_code=None, result_filter=None): if __name__ == "__main__": parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( - "--language_code", - help="The BCP-47 language code to use, e.g. 'en-US'.", + "--language_code", help="The BCP-47 language code to use, e.g. 'en-US'.", ) parser.add_argument( "--filter", @@ -67,6 +68,4 @@ def list_info_types(language_code=None, result_filter=None): args = parser.parse_args() - list_info_types( - language_code=args.language_code, result_filter=args.filter - ) + list_info_types(language_code=args.language_code, result_filter=args.filter) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index ba55d7ce..5660f08b 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -37,24 +37,22 @@ TEST_CONFIG = { # You can opt out from the test for specific Python versions. - 'ignored_versions': ["2.7"], - + "ignored_versions": ["2.7"], # An envvar key for determining the project id to use. Change it # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a # build specific Cloud project. You can also use your own string # to use your own Cloud project. - 'gcloud_project_env': 'GOOGLE_CLOUD_PROJECT', + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', - # A dictionary you want to inject into your test. Don't put any # secrets here. These values will override predefined values. - 'envs': {}, + "envs": {}, } try: # Ensure we can import noxfile_config in the project's directory. - sys.path.append('.') + sys.path.append(".") from noxfile_config import TEST_CONFIG_OVERRIDE except ImportError as e: print("No user noxfile_config found: detail: {}".format(e)) @@ -69,12 +67,12 @@ def get_pytest_env_vars(): ret = {} # Override the GCLOUD_PROJECT and the alias. 
- env_key = TEST_CONFIG['gcloud_project_env'] + env_key = TEST_CONFIG["gcloud_project_env"] # This should error out if not set. - ret['GOOGLE_CLOUD_PROJECT'] = os.environ[env_key] + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] # Apply user supplied envs. - ret.update(TEST_CONFIG['envs']) + ret.update(TEST_CONFIG["envs"]) return ret @@ -83,7 +81,7 @@ def get_pytest_env_vars(): ALL_VERSIONS = ["2.7", "3.6", "3.7", "3.8"] # Any default versions that should be ignored. -IGNORED_VERSIONS = TEST_CONFIG['ignored_versions'] +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) @@ -138,7 +136,7 @@ def lint(session): args = FLAKE8_COMMON_ARGS + [ "--application-import-names", ",".join(local_names), - "." + ".", ] session.run("flake8", *args) @@ -182,9 +180,9 @@ def py(session): if session.python in TESTED_VERSIONS: _session_tests(session) else: - session.skip("SKIPPED: {} tests are disabled for this sample.".format( - session.python - )) + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) # diff --git a/samples/snippets/quickstart.py b/samples/snippets/quickstart.py index ec929b45..d40fac4f 100644 --- a/samples/snippets/quickstart.py +++ b/samples/snippets/quickstart.py @@ -41,7 +41,7 @@ def quickstart(project_id): info_types = [{"name": "FIRST_NAME"}, {"name": "LAST_NAME"}] # The minimum likelihood to constitute a match. Optional. - min_likelihood = "LIKELIHOOD_UNSPECIFIED" + min_likelihood = google.cloud.dlp_v2.Likelihood.LIKELIHOOD_UNSPECIFIED # The maximum number of findings to report (0 = server maximum). Optional. max_findings = 0 @@ -59,10 +59,12 @@ def quickstart(project_id): } # Convert the project id into a full resource id. - parent = dlp_client.project_path(project_id) + parent = f"projects/{project_id}" # Call the API. 
- response = dlp_client.inspect_content(parent, inspect_config, item) + response = dlp_client.inspect_content( + request={"parent": parent, "inspect_config": inspect_config, "item": item} + ) # Print out the results. if response.result.findings: @@ -73,13 +75,7 @@ def quickstart(project_id): pass print("Info type: {}".format(finding.info_type.name)) # Convert likelihood value to string respresentation. - likelihood = ( - google.cloud.dlp.types.Finding.DESCRIPTOR.fields_by_name[ - "likelihood" - ] - .enum_type.values_by_number[finding.likelihood] - .name - ) + likelihood = finding.likelihood.name print("Likelihood: {}".format(likelihood)) else: print("No findings.") @@ -88,9 +84,7 @@ def quickstart(project_id): if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument( - "project_id", help="Enter your GCP project id.", type=str - ) + parser.add_argument("project_id", help="Enter your GCP project id.", type=str) args = parser.parse_args() if len(sys.argv) == 1: parser.print_usage() diff --git a/samples/snippets/quickstart_test.py b/samples/snippets/quickstart_test.py index 1814497c..153c0cd8 100644 --- a/samples/snippets/quickstart_test.py +++ b/samples/snippets/quickstart_test.py @@ -14,9 +14,6 @@ import os -import google.cloud.dlp -import mock - import quickstart @@ -24,13 +21,7 @@ def test_quickstart(capsys): - # Mock out project_path to use the test runner's project ID. 
- with mock.patch.object( - google.cloud.dlp.DlpServiceClient, - "project_path", - return_value="projects/{}".format(GCLOUD_PROJECT), - ): - quickstart.quickstart(GCLOUD_PROJECT) + quickstart.quickstart(GCLOUD_PROJECT) out, _ = capsys.readouterr() assert "FIRST_NAME" in out diff --git a/samples/snippets/redact.py b/samples/snippets/redact.py index 8a1650a2..b8307530 100644 --- a/samples/snippets/redact.py +++ b/samples/snippets/redact.py @@ -30,12 +30,7 @@ def redact_image( - project, - filename, - output_filename, - info_types, - min_likelihood=None, - mime_type=None, + project, filename, output_filename, info_types, min_likelihood=None, mime_type=None, ): """Uses the Data Loss Prevention API to redact protected data in an image. Args: @@ -99,14 +94,16 @@ def redact_image( byte_item = {"type": content_type_index, "data": f.read()} # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. response = dlp.redact_image( - parent, - inspect_config=inspect_config, - image_redaction_configs=image_redaction_configs, - byte_item=byte_item, + request={ + "parent": parent, + "inspect_config": inspect_config, + "image_redaction_configs": image_redaction_configs, + "byte_item": byte_item, + } ) # Write out the results. @@ -125,9 +122,7 @@ def redact_image( def redact_image_all_text( - project, - filename, - output_filename, + project, filename, output_filename, ): """Uses the Data Loss Prevention API to redact all text in an image. @@ -147,30 +142,33 @@ def redact_image_all_text( # Construct the image_redaction_configs, indicating to DLP that all text in # the input image should be redacted. - image_redaction_configs = [{ - "redact_all_text": True, - }] + image_redaction_configs = [{"redact_all_text": True}] # Construct the byte_item, containing the file's byte data. 
 with open(filename, mode="rb") as f: - byte_item = {"type": "IMAGE", "data": f.read()} + byte_item = {"type": google.cloud.dlp_v2.FileType.IMAGE, "data": f.read()} # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. response = dlp.redact_image( - parent, - image_redaction_configs=image_redaction_configs, - byte_item=byte_item, + request={ + "parent": parent, + "image_redaction_configs": image_redaction_configs, + "byte_item": byte_item, + } ) # Write out the results. with open(output_filename, mode="wb") as f: f.write(response.redacted_image) - print("Wrote {byte_count} to {filename}".format( - byte_count=len(response.redacted_image), filename=output_filename)) + print( + "Wrote {byte_count} to {filename}".format( + byte_count=len(response.redacted_image), filename=output_filename + ) + ) # [END dlp_redact_image_all_text] @@ -184,16 +182,15 @@ def redact_image_all_text( help="The Google Cloud project id to use as a parent resource.", default=default_project, ) + common_args_parser.add_argument("filename", help="The path to the file to inspect.") common_args_parser.add_argument( - "filename", help="The path to the file to inspect.") - common_args_parser.add_argument( - "output_filename", - help="The path to which the redacted image will be written.", + "output_filename", help="The path to which the redacted image will be written.", ) parser = argparse.ArgumentParser(description=__doc__) subparsers = parser.add_subparsers( - dest="content", help="Select which content should be redacted.") + dest="content", help="Select which content should be redacted." 
+ ) subparsers.required = True info_types_parser = subparsers.add_parser( @@ -249,7 +246,5 @@ def redact_image_all_text( ) elif args.content == "all_text": redact_image_all_text( - args.project, - args.filename, - args.output_filename, + args.project, args.filename, args.output_filename, ) diff --git a/samples/snippets/redact_test.py b/samples/snippets/redact_test.py index 0cce514e..a6f85e33 100644 --- a/samples/snippets/redact_test.py +++ b/samples/snippets/redact_test.py @@ -36,10 +36,7 @@ def test_redact_image_file(tempdir, capsys): output_filepath = os.path.join(tempdir, "redacted.png") redact.redact_image( - GCLOUD_PROJECT, - test_filepath, - output_filepath, - ["FIRST_NAME", "EMAIL_ADDRESS"], + GCLOUD_PROJECT, test_filepath, output_filepath, ["FIRST_NAME", "EMAIL_ADDRESS"], ) out, _ = capsys.readouterr() @@ -51,9 +48,7 @@ def test_redact_image_all_text(tempdir, capsys): output_filepath = os.path.join(tempdir, "redacted.png") redact.redact_image_all_text( - GCLOUD_PROJECT, - test_filepath, - output_filepath, + GCLOUD_PROJECT, test_filepath, output_filepath, ) out, _ = capsys.readouterr() diff --git a/samples/snippets/risk.py b/samples/snippets/risk.py index 518f947e..59f7362a 100644 --- a/samples/snippets/risk.py +++ b/samples/snippets/risk.py @@ -61,7 +61,7 @@ def numerical_risk_analysis( # Convert the project id into full resource ids. topic = google.cloud.pubsub.PublisherClient.topic_path(project, topic_id) - parent = dlp.location_path(project, 'global') + parent = f"projects/{project}/locations/global" # Location info of the BigQuery table. 
source_table = { @@ -76,15 +76,13 @@ def numerical_risk_analysis( # Configure risk analysis job # Give the name of the numeric column to compute risk metrics for risk_job = { - "privacy_metric": { - "numerical_stats_config": {"field": {"name": column_name}} - }, + "privacy_metric": {"numerical_stats_config": {"field": {"name": column_name}}}, "source_table": source_table, "actions": actions, } # Call API to start risk analysis job - operation = dlp.create_dlp_job(parent, risk_job=risk_job) + operation = dlp.create_dlp_job(request={"parent": parent, "risk_job": risk_job}) def callback(message): if message.attributes["DlpJobName"] == operation.name: @@ -92,12 +90,11 @@ def callback(message): message.ack() # Now that the job is done, fetch the results and print them. - job = dlp.get_dlp_job(operation.name) + job = dlp.get_dlp_job(request={"name": operation.name}) results = job.risk_details.numerical_stats_result print( "Value Range: [{}, {}]".format( - results.min_value.integer_value, - results.max_value.integer_value, + results.min_value.integer_value, results.max_value.integer_value, ) ) prev_value = None @@ -172,7 +169,7 @@ def categorical_risk_analysis( # Convert the project id into full resource ids. topic = google.cloud.pubsub.PublisherClient.topic_path(project, topic_id) - parent = dlp.location_path(project, 'global') + parent = f"projects/{project}/locations/global" # Location info of the BigQuery table. source_table = { @@ -195,7 +192,7 @@ def categorical_risk_analysis( } # Call API to start risk analysis job - operation = dlp.create_dlp_job(parent, risk_job=risk_job) + operation = dlp.create_dlp_job(request={"parent": parent, "risk_job": risk_job}) def callback(message): if message.attributes["DlpJobName"] == operation.name: @@ -203,7 +200,7 @@ def callback(message): message.ack() # Now that the job is done, fetch the results and print them. 
- job = dlp.get_dlp_job(operation.name) + job = dlp.get_dlp_job(request={"name": operation.name}) histogram_buckets = ( job.risk_details.categorical_stats_result.value_frequency_histogram_buckets # noqa: E501 ) @@ -297,7 +294,7 @@ def get_values(obj): # Convert the project id into a full resource id. topic = google.cloud.pubsub.PublisherClient.topic_path(project, topic_id) - parent = dlp.location_path(project, 'global') + parent = f"projects/{project}/locations/global" # Location info of the BigQuery table. source_table = { @@ -324,7 +321,7 @@ def map_fields(field): } # Call API to start risk analysis job - operation = dlp.create_dlp_job(parent, risk_job=risk_job) + operation = dlp.create_dlp_job(request={"parent": parent, "risk_job": risk_job}) def callback(message): if message.attributes["DlpJobName"] == operation.name: @@ -332,7 +329,7 @@ def callback(message): message.ack() # Now that the job is done, fetch the results and print them. - job = dlp.get_dlp_job(operation.name) + job = dlp.get_dlp_job(request={"name": operation.name}) histogram_buckets = ( job.risk_details.k_anonymity_result.equivalence_class_histogram_buckets ) @@ -429,7 +426,7 @@ def get_values(obj): # Convert the project id into a full resource id. topic = google.cloud.pubsub.PublisherClient.topic_path(project, topic_id) - parent = dlp.location_path(project, 'global') + parent = f"projects/{project}/locations/global" # Location info of the BigQuery table. source_table = { @@ -461,7 +458,7 @@ def map_fields(field): } # Call API to start risk analysis job - operation = dlp.create_dlp_job(parent, risk_job=risk_job) + operation = dlp.create_dlp_job(request={"parent": parent, "risk_job": risk_job}) def callback(message): if message.attributes["DlpJobName"] == operation.name: @@ -469,7 +466,7 @@ def callback(message): message.ack() # Now that the job is done, fetch the results and print them. 
- job = dlp.get_dlp_job(operation.name) + job = dlp.get_dlp_job(request={"name": operation.name}) histogram_buckets = ( job.risk_details.l_diversity_result.sensitive_value_frequency_histogram_buckets # noqa: E501 ) @@ -489,9 +486,7 @@ def callback(message): ) ) print( - " Class size: {}".format( - value_bucket.equivalence_class_size - ) + " Class size: {}".format(value_bucket.equivalence_class_size) ) for value in value_bucket.top_sensitive_values: print( @@ -580,7 +575,7 @@ def get_values(obj): # Convert the project id into full resource ids. topic = google.cloud.pubsub.PublisherClient.topic_path(project, topic_id) - parent = dlp.location_path(project, 'global') + parent = f"projects/{project}/locations/global" # Location info of the BigQuery table. source_table = { @@ -619,7 +614,7 @@ def map_fields(quasi_id, info_type): } # Call API to start risk analysis job - operation = dlp.create_dlp_job(parent, risk_job=risk_job) + operation = dlp.create_dlp_job(request={"parent": parent, "risk_job": risk_job}) def callback(message): if message.attributes["DlpJobName"] == operation.name: @@ -627,7 +622,7 @@ def callback(message): message.ack() # Now that the job is done, fetch the results and print them. - job = dlp.get_dlp_job(operation.name) + job = dlp.get_dlp_job(request={"name": operation.name}) histogram_buckets = ( job.risk_details.k_map_estimation_result.k_map_estimation_histogram ) @@ -684,8 +679,7 @@ def callback(message): numerical_parser = subparsers.add_parser("numerical", help="") numerical_parser.add_argument( - "project", - help="The Google Cloud project id to use as a parent resource.", + "project", help="The Google Cloud project id to use as a parent resource.", ) numerical_parser.add_argument( "table_project_id", @@ -694,12 +688,9 @@ def callback(message): numerical_parser.add_argument( "dataset_id", help="The id of the dataset to inspect." 
) + numerical_parser.add_argument("table_id", help="The id of the table to inspect.") numerical_parser.add_argument( - "table_id", help="The id of the table to inspect." - ) - numerical_parser.add_argument( - "column_name", - help="The name of the column to compute risk metrics for.", + "column_name", help="The name of the column to compute risk metrics for.", ) numerical_parser.add_argument( "topic_id", @@ -718,8 +709,7 @@ def callback(message): categorical_parser = subparsers.add_parser("categorical", help="") categorical_parser.add_argument( - "project", - help="The Google Cloud project id to use as a parent resource.", + "project", help="The Google Cloud project id to use as a parent resource.", ) categorical_parser.add_argument( "table_project_id", @@ -728,12 +718,9 @@ def callback(message): categorical_parser.add_argument( "dataset_id", help="The id of the dataset to inspect." ) + categorical_parser.add_argument("table_id", help="The id of the table to inspect.") categorical_parser.add_argument( - "table_id", help="The id of the table to inspect." - ) - categorical_parser.add_argument( - "column_name", - help="The name of the column to compute risk metrics for.", + "column_name", help="The name of the column to compute risk metrics for.", ) categorical_parser.add_argument( "topic_id", @@ -752,12 +739,10 @@ def callback(message): k_anonymity_parser = subparsers.add_parser( "k_anonymity", - help="Computes the k-anonymity of a column set in a Google BigQuery" - "table.", + help="Computes the k-anonymity of a column set in a Google BigQuery" "table.", ) k_anonymity_parser.add_argument( - "project", - help="The Google Cloud project id to use as a parent resource.", + "project", help="The Google Cloud project id to use as a parent resource.", ) k_anonymity_parser.add_argument( "table_project_id", @@ -766,9 +751,7 @@ def callback(message): k_anonymity_parser.add_argument( "dataset_id", help="The id of the dataset to inspect." 
) - k_anonymity_parser.add_argument( - "table_id", help="The id of the table to inspect." - ) + k_anonymity_parser.add_argument("table_id", help="The id of the table to inspect.") k_anonymity_parser.add_argument( "topic_id", help="The name of the Pub/Sub topic to notify once the job completes.", @@ -779,9 +762,7 @@ def callback(message): "job completion notifications.", ) k_anonymity_parser.add_argument( - "quasi_ids", - nargs="+", - help="A set of columns that form a composite key.", + "quasi_ids", nargs="+", help="A set of columns that form a composite key.", ) k_anonymity_parser.add_argument( "--timeout", @@ -791,12 +772,10 @@ def callback(message): l_diversity_parser = subparsers.add_parser( "l_diversity", - help="Computes the l-diversity of a column set in a Google BigQuery" - "table.", + help="Computes the l-diversity of a column set in a Google BigQuery" "table.", ) l_diversity_parser.add_argument( - "project", - help="The Google Cloud project id to use as a parent resource.", + "project", help="The Google Cloud project id to use as a parent resource.", ) l_diversity_parser.add_argument( "table_project_id", @@ -805,9 +784,7 @@ def callback(message): l_diversity_parser.add_argument( "dataset_id", help="The id of the dataset to inspect." ) - l_diversity_parser.add_argument( - "table_id", help="The id of the table to inspect." 
- ) + l_diversity_parser.add_argument("table_id", help="The id of the table to inspect.") l_diversity_parser.add_argument( "topic_id", help="The name of the Pub/Sub topic to notify once the job completes.", @@ -818,13 +795,10 @@ def callback(message): "job completion notifications.", ) l_diversity_parser.add_argument( - "sensitive_attribute", - help="The column to measure l-diversity relative to.", + "sensitive_attribute", help="The column to measure l-diversity relative to.", ) l_diversity_parser.add_argument( - "quasi_ids", - nargs="+", - help="A set of columns that form a composite key.", + "quasi_ids", nargs="+", help="A set of columns that form a composite key.", ) l_diversity_parser.add_argument( "--timeout", @@ -838,19 +812,14 @@ def callback(message): "BigQuery table.", ) k_map_parser.add_argument( - "project", - help="The Google Cloud project id to use as a parent resource.", + "project", help="The Google Cloud project id to use as a parent resource.", ) k_map_parser.add_argument( "table_project_id", help="The Google Cloud project id where the BigQuery table is stored.", ) - k_map_parser.add_argument( - "dataset_id", help="The id of the dataset to inspect." - ) - k_map_parser.add_argument( - "table_id", help="The id of the table to inspect." 
- ) + k_map_parser.add_argument("dataset_id", help="The id of the dataset to inspect.") + k_map_parser.add_argument("table_id", help="The id of the table to inspect.") k_map_parser.add_argument( "topic_id", help="The name of the Pub/Sub topic to notify once the job completes.", @@ -861,9 +830,7 @@ def callback(message): "job completion notifications.", ) k_map_parser.add_argument( - "quasi_ids", - nargs="+", - help="A set of columns that form a composite key.", + "quasi_ids", nargs="+", help="A set of columns that form a composite key.", ) k_map_parser.add_argument( "-t", diff --git a/samples/snippets/risk_test.py b/samples/snippets/risk_test.py index 25d9575d..46208a9c 100644 --- a/samples/snippets/risk_test.py +++ b/samples/snippets/risk_test.py @@ -63,9 +63,7 @@ def subscription_id(topic_id): # Subscribes to a topic. subscriber = google.cloud.pubsub.SubscriberClient() topic_path = subscriber.topic_path(GCLOUD_PROJECT, topic_id) - subscription_path = subscriber.subscription_path( - GCLOUD_PROJECT, SUBSCRIPTION_ID - ) + subscription_path = subscriber.subscription_path(GCLOUD_PROJECT, SUBSCRIPTION_ID) try: subscriber.create_subscription(subscription_path, topic_path) except google.api_core.exceptions.AlreadyExists: @@ -101,9 +99,7 @@ def bigquery_project(): harmful_table.schema = ( google.cloud.bigquery.SchemaField("Name", "STRING", "REQUIRED"), - google.cloud.bigquery.SchemaField( - "TelephoneNumber", "STRING", "REQUIRED" - ), + google.cloud.bigquery.SchemaField("TelephoneNumber", "STRING", "REQUIRED"), google.cloud.bigquery.SchemaField("Mystery", "STRING", "REQUIRED"), google.cloud.bigquery.SchemaField("Age", "INTEGER", "REQUIRED"), google.cloud.bigquery.SchemaField("Gender", "STRING"), @@ -122,40 +118,12 @@ def bigquery_project(): rows_to_insert = [(u"Gary Smith", u"My email is gary@example.com")] harmful_rows_to_insert = [ - ( - u"Gandalf", - u"(123) 456-7890", - "4231 5555 6781 9876", - 27, - "Male", - "US", - ), - ( - u"Dumbledore", - u"(313) 337-1337", - 
"6291 8765 1095 7629", - 27, - "Male", - "US", - ), + (u"Gandalf", u"(123) 456-7890", "4231 5555 6781 9876", 27, "Male", "US",), + (u"Dumbledore", u"(313) 337-1337", "6291 8765 1095 7629", 27, "Male", "US",), (u"Joe", u"(452) 123-1234", "3782 2288 1166 3030", 35, "Male", "US"), (u"James", u"(567) 890-1234", "8291 3627 8250 1234", 19, "Male", "US"), - ( - u"Marie", - u"(452) 123-1234", - "8291 3627 8250 1234", - 35, - "Female", - "US", - ), - ( - u"Carrie", - u"(567) 890-1234", - "2253 5218 4251 4526", - 35, - "Female", - "US", - ), + (u"Marie", u"(452) 123-1234", "8291 3627 8250 1234", 35, "Female", "US",), + (u"Carrie", u"(567) 890-1234", "2253 5218 4251 4526", 35, "Female", "US",), ] bigquery_client.insert_rows(table, rows_to_insert) @@ -166,9 +134,7 @@ def bigquery_project(): @pytest.mark.flaky(max_runs=3, min_passes=1) -def test_numerical_risk_analysis( - topic_id, subscription_id, bigquery_project, capsys -): +def test_numerical_risk_analysis(topic_id, subscription_id, bigquery_project, capsys): risk.numerical_risk_analysis( GCLOUD_PROJECT, TABLE_PROJECT, diff --git a/samples/snippets/templates.py b/samples/snippets/templates.py index 2d9f8137..3c00d1e1 100644 --- a/samples/snippets/templates.py +++ b/samples/snippets/templates.py @@ -18,7 +18,6 @@ import argparse import os -import time # [START dlp_create_template] @@ -74,11 +73,15 @@ def create_inspect_template( } # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. response = dlp.create_inspect_template( - parent, inspect_template=inspect_template, template_id=template_id + request={ + "parent": parent, + "inspect_template": inspect_template, + "template_id": template_id, + } ) print("Successfully created template {}".format(response.name)) @@ -103,32 +106,21 @@ def list_inspect_templates(project): dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. 
- parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. - response = dlp.list_inspect_templates(parent) - - # Define a helper function to convert the API's "seconds since the epoch" - # time format into a human-readable string. - def human_readable_time(timestamp): - return str(time.localtime(timestamp.seconds)) + response = dlp.list_inspect_templates(request={"parent": parent}) for template in response: print("Template {}:".format(template.name)) if template.display_name: print(" Display Name: {}".format(template.display_name)) - print( - " Created: {}".format(human_readable_time(template.create_time)) - ) - print( - " Updated: {}".format(human_readable_time(template.update_time)) - ) + print(" Created: {}".format(template.create_time)) + print(" Updated: {}".format(template.update_time)) config = template.inspect_config print( - " InfoTypes: {}".format( - ", ".join([it.name for it in config.info_types]) - ) + " InfoTypes: {}".format(", ".join([it.name for it in config.info_types])) ) print(" Minimum likelihood: {}".format(config.min_likelihood)) print(" Include quotes: {}".format(config.include_quote)) @@ -159,13 +151,13 @@ def delete_inspect_template(project, template_id): dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Combine the template id with the parent id. template_resource = "{}/inspectTemplates/{}".format(parent, template_id) # Call the API. - dlp.delete_inspect_template(template_resource) + dlp.delete_inspect_template(request={"name": template_resource}) print("Template {} successfully deleted.".format(template_resource)) @@ -185,8 +177,7 @@ def delete_inspect_template(project, template_id): parser_create = subparsers.add_parser("create", help="Create a template.") parser_create.add_argument( "--template_id", - help="The id of the template. 
If omitted, an id will be randomly " - "generated", + help="The id of the template. If omitted, an id will be randomly " "generated", ) parser_create.add_argument( "--display_name", help="The optional display name of the template." @@ -239,9 +230,7 @@ def delete_inspect_template(project, template_id): ) parser_delete = subparsers.add_parser("delete", help="Delete a template.") - parser_delete.add_argument( - "template_id", help="The id of the template to delete." - ) + parser_delete.add_argument("template_id", help="The id of the template to delete.") parser_delete.add_argument( "--project", help="The Google Cloud project id to use as a parent resource.", diff --git a/samples/snippets/triggers.py b/samples/snippets/triggers.py index 7548ab89..0070da3c 100644 --- a/samples/snippets/triggers.py +++ b/samples/snippets/triggers.py @@ -18,7 +18,6 @@ import argparse import os -import time # [START dlp_create_trigger] @@ -92,9 +91,7 @@ def create_trigger( # Construct the schedule definition: schedule = { - "recurrence_period_duration": { - "seconds": scan_period_days * 60 * 60 * 24 - } + "recurrence_period_duration": {"seconds": scan_period_days * 60 * 60 * 24} } # Construct the trigger definition. @@ -103,15 +100,15 @@ def create_trigger( "display_name": display_name, "description": description, "triggers": [{"schedule": schedule}], - "status": "HEALTHY", + "status": google.cloud.dlp_v2.JobTrigger.Status.HEALTHY, } # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. response = dlp.create_job_trigger( - parent, job_trigger=job_trigger, trigger_id=trigger_id + request={"parent": parent, "job_trigger": job_trigger, "trigger_id": trigger_id} ) print("Successfully created trigger {}".format(response.name)) @@ -136,20 +133,15 @@ def list_triggers(project): dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. 
- parent = dlp.project_path(project) + parent = f"projects/{project}" # Call the API. - response = dlp.list_job_triggers(parent) - - # Define a helper function to convert the API's "seconds since the epoch" - # time format into a human-readable string. - def human_readable_time(timestamp): - return str(time.localtime(timestamp.seconds)) + response = dlp.list_job_triggers(request={"parent": parent}) for trigger in response: print("Trigger {}:".format(trigger.name)) - print(" Created: {}".format(human_readable_time(trigger.create_time))) - print(" Updated: {}".format(human_readable_time(trigger.update_time))) + print(" Created: {}".format(trigger.create_time)) + print(" Updated: {}".format(trigger.update_time)) if trigger.display_name: print(" Display Name: {}".format(trigger.display_name)) if trigger.description: @@ -178,13 +170,13 @@ def delete_trigger(project, trigger_id): dlp = google.cloud.dlp_v2.DlpServiceClient() # Convert the project id into a full resource id. - parent = dlp.project_path(project) + parent = f"projects/{project}" # Combine the trigger id with the parent id. trigger_resource = "{}/jobTriggers/{}".format(parent, trigger_id) # Call the API. - dlp.delete_job_trigger(trigger_resource) + dlp.delete_job_trigger(request={"name": trigger_resource}) print("Trigger {} successfully deleted.".format(trigger_resource)) @@ -212,8 +204,7 @@ def delete_trigger(project, trigger_id): ) parser_create.add_argument( "--trigger_id", - help="The id of the trigger. If omitted, an id will be randomly " - "generated", + help="The id of the trigger. If omitted, an id will be randomly " "generated", ) parser_create.add_argument( "--display_name", help="The optional display name of the trigger." 
@@ -254,9 +245,7 @@ def delete_trigger(project, trigger_id): help="The maximum number of findings to report; 0 = no maximum.", ) parser_create.add_argument( - "--auto_populate_timespan", - type=bool, - help="Limit scan to new content only.", + "--auto_populate_timespan", type=bool, help="Limit scan to new content only.", ) parser_list = subparsers.add_parser("list", help="List all triggers.") @@ -267,9 +256,7 @@ def delete_trigger(project, trigger_id): ) parser_delete = subparsers.add_parser("delete", help="Delete a trigger.") - parser_delete.add_argument( - "trigger_id", help="The id of the trigger to delete." - ) + parser_delete.add_argument("trigger_id", help="The id of the trigger to delete.") parser_delete.add_argument( "--project", help="The Google Cloud project id to use as a parent resource.", diff --git a/scripts/fixup_dlp_v2_keywords.py b/scripts/fixup_dlp_v2_keywords.py new file mode 100644 index 00000000..5acfcdbe --- /dev/null +++ b/scripts/fixup_dlp_v2_keywords.py @@ -0,0 +1,211 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class dlpCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'activate_job_trigger': ('name', ), + 'cancel_dlp_job': ('name', ), + 'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ), + 'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ), + 'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ), + 'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ), + 'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ), + 'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ), + 'delete_deidentify_template': ('name', ), + 'delete_dlp_job': ('name', ), + 'delete_inspect_template': ('name', ), + 'delete_job_trigger': ('name', ), + 'delete_stored_info_type': ('name', ), + 'finish_dlp_job': ('name', ), + 'get_deidentify_template': ('name', ), + 'get_dlp_job': ('name', ), + 'get_inspect_template': ('name', ), + 'get_job_trigger': ('name', ), + 'get_stored_info_type': ('name', ), + 'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ), + 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), + 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), + 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 
'location_id', ), + 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type', 'order_by', 'location_id', ), + 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), + 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'location_id', ), + 'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ), + 'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ), + 'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ), + 'update_inspect_template': ('name', 'inspect_template', 'update_mask', ), + 'update_job_trigger': ('name', 'job_trigger', 'update_mask', ), + 'update_stored_info_type': ('name', 'config', 'update_mask', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=dlpCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the dlp client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. 
+ +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/setup.py b/setup.py index 362c2463..fcdcf708 100644 --- a/setup.py +++ b/setup.py @@ -24,8 +24,9 @@ version = "1.0.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", - 'enum34; python_version < "3.4"', + "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", + "proto-plus >= 0.4.0", + "libcst >= 0.2.5", ] package_root = os.path.abspath(os.path.dirname(__file__)) @@ -35,7 +36,9 @@ readme = readme_file.read() packages = [ - package for package in setuptools.find_packages() if 
package.startswith("google") + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") ] namespaces = ["google"] @@ -56,12 +59,10 @@ "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -69,7 +70,8 @@ packages=packages, namespace_packages=namespaces, install_requires=dependencies, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", + python_requires=">=3.6", + scripts=["scripts/fixup_dlp_v2_keywords.py"], include_package_data=True, zip_safe=False, ) diff --git a/synth.metadata b/synth.metadata index 0ebb8d41..089ccad8 100644 --- a/synth.metadata +++ b/synth.metadata @@ -4,21 +4,21 @@ "git": { "name": ".", "remote": "https://github.com/googleapis/python-dlp.git", - "sha": "973bcc3783029e9b45b23fa13e52bcab4b6f2630" + "sha": "419772863bfa747cf0ef6278a931f95da70c954f" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5747555f7620113d9a2078a48f4c047a99d31b3e" + "sha": "d3049e66447b44dc10579e461d5e08e0e3838edd" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "5747555f7620113d9a2078a48f4c047a99d31b3e" + "sha": "d3049e66447b44dc10579e461d5e08e0e3838edd" } } ], diff --git a/synth.py b/synth.py index 802c4faa..91b953d9 100644 --- a/synth.py +++ b/synth.py @@ -32,237 +32,19 @@ version="v2", bazel_target="//google/privacy/dlp/v2:privacy-dlp-v2-py", include_protos=True, + proto_output_path=f"google/cloud/dlp_v2/proto", ) excludes = ["README.rst", "nox.py", "setup.py", "docs/index.rst"] s.move(library, 
excludes=excludes) -# Fix namespace -s.replace("google/**/*.py", "google\.cloud\.privacy\.dlp_v2", "google.cloud.dlp_v2") - -s.replace( - "google/cloud/dlp_v2/gapic/dlp_service_client.py", - "google-cloud-privacy-dlp", - "google-cloud-dlp", -) - -# Add missing utf-8 marker -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "# Generated by the protocol buffer compiler. DO NOT EDIT!", - "# -*- coding: utf-8 -*-\n\g<0>", -) - -# Fix raw-latex bits in storage_pb2.py -s.replace( - "google/cloud/dlp_v2/proto/storage_pb2.py", - "number regex.*\n(\s+)latex:.*\n", - r'number regex "(\\d\{3\}) \\d\{3\}-\\d\{4\} "\\\n' - "\g<1>could be adjusted upwards if the area code is \\\n", -) - -# Fix Docstrings in google/cloud/dlp_v2/proto/storage_pb2.py -s.replace( - "google/cloud/dlp_v2/proto/storage_pb2.py", - "(hotword_regex:)\n(\s+Regular expression.*)\n", - "\g<1> \\\n\g<2> \\\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/storage_pb2.py", - "(likelihood_adjustment:)\n", - "\g<1> \\\n", -) - -# Fix Docstrings in google/cloud/dlp_v2/proto/dlp_pb2.py -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(max_findings_per_item:)\n(\s+Max number.*)\n(\s+scanned. 
When.*)\n" - "(\s+maximum returned is 1000.*)\n(\s+When set within.*)\n", - "\g<1> \\\n\g<2> \\\n\g<3> \\\n\g<4> \\\n\g<5> \\\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(max_findings_per_request:)\n(\s+Max number of.*)\n(\s+When set .*)\n", - "\g<1> \\\n\g<2> \\\n\g<3> \\\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(max_findings_per_info_type:)\n", - "\g<1> \\\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(snapshot_inspect_template:)\n(\s+If run with an .*)\n", - "\g<1> \\\n\g<2> \\\n", -) - -to_replace = [ - "processed_bytes:", - "total_estimated_bytes:", - "info_type_stats:", - "Statistics of how many instances of each info type were found", - "requested_options:", -] - -for replace in to_replace: - s.replace("google/cloud/dlp_v2/proto/dlp_pb2.py", f"({replace})\n", "\g<1> \\\n") - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(sensitive_value_frequency_lower_bound:)\n(\s+Lower bound.*)\n", - "\g<1> \\\n\g<2> \\\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(sensitive_value_frequency_upper_bound:)\n(\s+Upper bound.*)\n", - "\g<1> \\\n\g<2> \\\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(bucket_size:)\n(\s+Total number of equivalence.*)\n", - "\g<1> \\\n\g<2>\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(bucket_values:)\n(\s+Sample of equivalence.*)\n", - "\g<1> \\\n\g<2> \\\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(offset_minutes:)\n(\s+Set only.*)\n", - "\g<1> \\\n\g<2> \\\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(result:)\n(\s+A summary of the outcome of this inspect job.)", - "\g<1> \\\n\g<2>", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(storage_config:)\n(\s+The data to scan.\n)", - "\g<1> \\\n\g<2>", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(inspect_config:)\n(\s+How and what to scan for.\n)", - "\g<1> \\\n\g<2>", -) - -s.replace( - 
"google/cloud/dlp_v2/proto/dlp_pb2.py", - "(inspect_template_name:)\n(\s+If provided, will be.*)\n" - "(\s+InspectConfig.*)\n(\s+values persisted.*)\n(\s+actions:)\n" - "(\s+Actions to.*)\n", - "\g<1> \\\n\g<2> \\\n\g<3> \\\n\g<4> \\\n\g<5> \\\n\g<6> \\\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - " (\s+Set of values defining the equivalence class.*)\n" - " (\s+quasi-identifier.*)\n" - " (\s+message. The order.*)\n", - "\g<1> \\\n\g<2> \\\n\g<3>\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - " (\s+Size of the equivalence class, for example number of rows with)\n" - " (\s+the above set of values.)\n", - "\g<1> \\\n\g<2>\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(equivalence_class_size_lower_bound:)\n(\s+Lower bound.*)\n", - "\g<1> \\\n\g<2> \\\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(equivalence_class_size_upper_bound:)\n(\s+Upper bound.*)\n", - "\g<1> \\\n\g<2> \\\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(bucket_value_count:)\n(\s+Total number of distinct equivalence.*)\n", - "\g<1> \\\n\g<2>\n", -) - -# Docstrings from categorical histogram bucket -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(value_frequency_lower_bound:)\n\s+(Lower bound.*)\n\s+(bucket.\n)" - "(\s+value_frequency_upper.*)\n\s+(Upper.*)\n\s+(bucket.\n)" - "(\s+bucket_size:)\n\s+(Total.*\n)" - "(\s+bucket_values:)\n\s+(Sample of value.*)\n\s+(of values.*\n)" - "(\s+bucket_value_count:)\n\s+(Total number.*\n)", - "\g<1> \g<2> \g<3>\g<4> \g<5> \g<6>\g<7> \g<8>" "\g<9> \g<10> \g<11>\g<12> \g<13>", -) - -# Fix docstrings tagged field indentation issues in dlp_pb2.py -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(DESCRIPTOR .*_TAGGEDFIELD,\n\s+__module__.*\n\s+,\n\s+__doc__.*\n)" - "(\s+field:)\n(\s+Identifies .*)\n(\s+tag:)\n(\s+Semantic.*)\n" - "(\s+determine.*)\n(\s+reidentifiability.*)\n(\s+info_type:)\n" - "(\s+A column.*)\n(\s+public 
dataset.*)\n(\s+available.*)\n(\s+ages.*)\n" - "(\s+supported Info.*)\n(\s+supported.*)\n(\s+custom_tag:)\n(\s+A col.*)\n" - "(\s+user must.*)\n(\s+statist.*)\n(\s+\(below.*)\n(\s+inferred:)\n" - "(\s+If no semantic.*)\n", - "\g<1>\g<2> \\\n\g<3>\n\g<4> \\\n\g<5> \\\n\g<6> \\\n" - "\g<7> \\\n\g<8> \\\n\g<9> \\\n\g<10> \\\n\g<11> \\\n\g<12> \\\n" - "\g<13> \\\n\g<14>\n\g<15> \\\n\g<16> \\\n\g<17> \\\n\g<18> \\\n" - "\g<19>\n\g<20> \\\n\g<21> \\\n", -) - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - r'''(\s+)__doc__ = """Attributes:''', - r'\g<1>__doc="""\n Attributes:', -) - - -s.replace( - "google/cloud/dlp_v2/proto/dlp_pb2.py", - "(////////.*)\n\s+(///////////////\n)", - "\g<1> \g<2>", -) - -# Fix Docstrings in google/cloud/dlp_v2/gapic/dlp_service_client.py -s.replace( - "google/cloud/dlp_v2/gapic/dlp_service_client.py", - "^\s+resource was created.", - " \g<0>", -) - -# Fix Docstrings in google/cloud/dlp_v2/gapic/enums.py -s.replace( - "google/cloud/dlp_v2/gapic/enums.py", - "(\s+)WHITESPACE \(int\).*\n", - "\g<1>WHITESPACE (int): Whitespace character\n", -) - -s.replace("google/cloud/dlp_v2/gapic/enums.py", ".*:raw-latex:.*\n", "") - # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- templated_files = common.py_library( - cov_level=73, system_test_dependencies=["test_utils"], samples=True + system_test_dependencies=["test_utils"], samples=True, microgenerator=True ) -s.move(templated_files) +s.move(templated_files, excludes=[".coveragerc"]) # microgenerator has a good .coveragerc file # ---------------------------------------------------------------------------- # Samples templates diff --git a/tests/system/gapic/v2/test_system_dlp_service_v2.py b/tests/system/gapic/v2/test_system_dlp_service_v2.py index e24f6a22..ee3151fb 100644 --- a/tests/system/gapic/v2/test_system_dlp_service_v2.py +++ 
b/tests/system/gapic/v2/test_system_dlp_service_v2.py @@ -16,8 +16,6 @@ import os from google.cloud import dlp_v2 -from google.cloud.dlp_v2 import enums -from google.cloud.dlp_v2.proto import dlp_pb2 class TestSystemDlpService(object): @@ -33,9 +31,11 @@ def test_inspect_content(self): project_id = self._get_project_id() client = dlp_v2.DlpServiceClient() - min_likelihood = enums.Likelihood.POSSIBLE + min_likelihood = dlp_v2.Likelihood.POSSIBLE info_types = [{"name": "FIRST_NAME"}, {"name": "LAST_NAME"}] inspect_config = {"info_types": info_types, "min_likelihood": min_likelihood} item = {"value": "Robert Frost"} - parent = client.project_path(project_id) - response = client.inspect_content(parent, inspect_config, item) + parent = f"projects/{project_id}" + response = client.inspect_content( + request={"parent": parent, "inspect_config": inspect_config, "item": item} + ) diff --git a/tests/system/gapic/v2/test_system_dlp_service_v2_vpcsc.py b/tests/system/gapic/v2/test_system_dlp_service_v2_vpcsc.py index e90237c2..89ce8b85 100644 --- a/tests/system/gapic/v2/test_system_dlp_service_v2_vpcsc.py +++ b/tests/system/gapic/v2/test_system_dlp_service_v2_vpcsc.py @@ -18,8 +18,6 @@ import pytest from google.cloud import dlp_v2 -from google.cloud.dlp_v2 import enums -from google.cloud.dlp_v2.proto import dlp_pb2 from google.api_core import exceptions from test_utils.vpcsc_config import vpcsc_config @@ -35,12 +33,12 @@ def client(): @pytest.fixture(scope="module") def name_inside(client): - return client.project_path(vpcsc_config.project_inside) + return f"projects/{vpcsc_config.project_inside}" @pytest.fixture(scope="module") def name_outside(client): - return client.project_path(vpcsc_config.project_outside) + return f"projects/{vpcsc_config.project_outside}" @pytest.fixture(scope="module") @@ -55,13 +53,17 @@ def bytes_content_item(): @vpcsc_config.skip_unless_inside_vpcsc def test_inspect_content_inside(client, name_inside, content_item): - 
client.inspect_content(name_inside, item=content_item) # no perms issue + client.inspect_content( + request={"parent": name_inside, "inspect_config": content_item} + ) # no perms issue @vpcsc_config.skip_unless_inside_vpcsc def test_inspect_content_outside(client, name_outside, content_item): with pytest.raises(exceptions.PermissionDenied) as exc: - client.inspect_content(name_outside, item=content_item) + client.inspect_content( + request={"parent": name_outside, "inspect_config": content_item} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @@ -69,13 +71,17 @@ def test_inspect_content_outside(client, name_outside, content_item): @vpcsc_config.skip_unless_inside_vpcsc def test_redact_image_inside(client, name_inside, bytes_content_item): with pytest.raises(exceptions.InvalidArgument): # no perms issue - client.redact_image(name_inside, byte_item=bytes_content_item) + client.redact_image( + request={"parent": name_inside, "location_id": bytes_content_item} + ) @vpcsc_config.skip_unless_inside_vpcsc def test_redact_image_outside(client, name_outside, bytes_content_item): with pytest.raises(exceptions.PermissionDenied) as exc: - client.redact_image(name_outside, byte_item=bytes_content_item) + client.redact_image( + request={"parent": name_outside, "location_id": bytes_content_item} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @@ -83,13 +89,17 @@ def test_redact_image_outside(client, name_outside, bytes_content_item): @vpcsc_config.skip_unless_inside_vpcsc def test_deidentify_content_inside(client, name_inside, content_item): with pytest.raises(exceptions.InvalidArgument): # no perms issue - client.deidentify_content(name_inside, item=content_item) + client.deidentify_content( + request={"parent": name_inside, "deidentify_config": content_item} + ) @vpcsc_config.skip_unless_inside_vpcsc def test_deidentify_content_outside(client, name_outside, content_item): with pytest.raises(exceptions.PermissionDenied) as exc: - 
client.deidentify_content(name_outside, item=content_item) + client.deidentify_content( + request={"parent": name_outside, "deidentify_config": content_item} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @@ -97,13 +107,17 @@ def test_deidentify_content_outside(client, name_outside, content_item): @vpcsc_config.skip_unless_inside_vpcsc def test_reidentify_content_inside(client, name_inside, content_item): with pytest.raises(exceptions.InvalidArgument): # no perms issue - client.reidentify_content(name_inside, item=content_item) + client.reidentify_content( + request={"parent": name_inside, "reidentify_config": content_item} + ) @vpcsc_config.skip_unless_inside_vpcsc def test_reidentify_content_outside(client, name_outside, content_item): with pytest.raises(exceptions.PermissionDenied) as exc: - client.reidentify_content(name_outside, item=content_item) + client.reidentify_content( + request={"parent": name_outside, "reidentify_config": content_item} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @@ -111,7 +125,7 @@ def test_reidentify_content_outside(client, name_outside, content_item): @pytest.fixture(scope="module") def inspect_template_path_inside(client): inspect_template_id = 1234567 - return client.project_inspect_template_path( + return client.inspect_template_path( vpcsc_config.project_inside, inspect_template_id ) @@ -119,7 +133,7 @@ def inspect_template_path_inside(client): @pytest.fixture(scope="module") def inspect_template_path_outside(client): inspect_template_id = 1234567 - return client.project_inspect_template_path( + return client.inspect_template_path( vpcsc_config.project_outside, inspect_template_id ) @@ -133,59 +147,71 @@ def inspect_template(): class TestCRUDInspectTemplate(object): @staticmethod def test_create_inspect_template_inside(client, name_inside, inspect_template): - client.create_inspect_template(name_inside, inspect_template) # no perms issue + client.create_inspect_template( + request={"parent": name_inside, 
"inspect_template": inspect_template} + ) # no perms issue @staticmethod def test_create_inspect_template_outside(client, name_outside, inspect_template): with pytest.raises(exceptions.PermissionDenied) as exc: - client.create_inspect_template(name_outside, inspect_template) + client.create_inspect_template( + request={"parent": name_outside, "inspect_template": inspect_template} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_list_inspect_templates_inside(client, name_inside): - list(client.list_inspect_templates(name_inside)) + list(client.list_inspect_templates(request={"parent": name_inside})) @staticmethod def test_list_inspect_templates_outside(client, name_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - list(client.list_inspect_templates(name_outside)) + list(client.list_inspect_templates(request={"parent": name_outside})) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_update_inspect_template_inside(client, inspect_template_path_inside): with pytest.raises(exceptions.NotFound): # no perms issue - client.update_inspect_template(inspect_template_path_inside) + client.update_inspect_template( + request={"name": inspect_template_path_inside} + ) @staticmethod def test_update_inspect_template_outside(client, inspect_template_path_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - client.update_inspect_template(inspect_template_path_outside) + client.update_inspect_template( + request={"name": inspect_template_path_outside} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_get_inspect_template_inside(client, inspect_template_path_inside): with pytest.raises(exceptions.NotFound): # no perms issue - client.get_inspect_template(inspect_template_path_inside) + client.get_inspect_template(request={"name": inspect_template_path_inside}) @staticmethod def test_get_inspect_template_outside(client, inspect_template_path_outside): with 
pytest.raises(exceptions.PermissionDenied) as exc: - client.get_inspect_template(inspect_template_path_outside) + client.get_inspect_template(request={"name": inspect_template_path_outside}) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_delete_inspect_template_inside(client, inspect_template_path_inside): with pytest.raises(exceptions.NotFound): # no perms issue - client.delete_inspect_template(inspect_template_path_inside) + client.delete_inspect_template( + request={"name": inspect_template_path_inside} + ) @staticmethod def test_delete_inspect_template_outside(client, inspect_template_path_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - client.delete_inspect_template(inspect_template_path_outside) + client.delete_inspect_template( + request={"name": inspect_template_path_outside} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @@ -193,7 +219,7 @@ def test_delete_inspect_template_outside(client, inspect_template_path_outside): @pytest.fixture(scope="module") def deidentify_template_path_inside(client): deidentify_template_id = 1234567 - return client.project_deidentify_template_path( + return client.deidentify_template_path( vpcsc_config.project_inside, deidentify_template_id ) @@ -201,7 +227,7 @@ def deidentify_template_path_inside(client): @pytest.fixture(scope="module") def deidentify_template_path_outside(client): deidentify_template_id = 1234567 - return client.project_deidentify_template_path( + return client.deidentify_template_path( vpcsc_config.project_outside, deidentify_template_id ) @@ -217,65 +243,84 @@ class TestCRUDDeidentifyTemplate(object): def test_create_deidentify_template_inside( client, name_inside, deidentify_template ): - client.create_deidentify_template(name_inside, deidentify_template) + client.create_deidentify_template( + request={"parent": name_inside, "deidentify_template": deidentify_template} + ) @staticmethod def test_create_deidentify_template_outside( client, 
name_outside, deidentify_template ): with pytest.raises(exceptions.PermissionDenied) as exc: - client.create_deidentify_template(name_outside, deidentify_template) + client.create_deidentify_template( + request={ + "parent": name_outside, + "deidentify_template": deidentify_template, + } + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_list_deidentify_templates_inside(client, name_inside): - list(client.list_deidentify_templates(name_inside)) + list(client.list_deidentify_templates(request={"parent": name_inside})) @staticmethod def test_list_deidentify_templates_outside(client, name_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - list(client.list_deidentify_templates(name_outside)) + list(client.list_deidentify_templates(request={"parent": name_outside})) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_update_deidentify_template_inside(client, deidentify_template_path_inside): with pytest.raises(exceptions.NotFound): # no perms issue - client.update_deidentify_template(deidentify_template_path_inside) + client.update_deidentify_template( + request={"name": deidentify_template_path_inside} + ) @staticmethod def test_update_deidentify_template_outside( client, deidentify_template_path_outside ): with pytest.raises(exceptions.PermissionDenied) as exc: - client.update_deidentify_template(deidentify_template_path_outside) + client.update_deidentify_template( + request={"name": deidentify_template_path_outside} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_get_deidentify_template_inside(client, deidentify_template_path_inside): with pytest.raises(exceptions.NotFound): # no perms issue - client.get_deidentify_template(deidentify_template_path_inside) + client.get_deidentify_template( + request={"name": deidentify_template_path_inside} + ) @staticmethod def test_get_deidentify_template_outside(client, deidentify_template_path_outside): with 
pytest.raises(exceptions.PermissionDenied) as exc: - client.get_deidentify_template(deidentify_template_path_outside) + client.get_deidentify_template( + request={"name": deidentify_template_path_outside} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_delete_deidentify_template_inside(client, deidentify_template_path_inside): with pytest.raises(exceptions.NotFound): # no perms issue - client.delete_deidentify_template(deidentify_template_path_inside) + client.delete_deidentify_template( + request={"name": deidentify_template_path_inside} + ) @staticmethod def test_delete_deidentify_template_outside( client, deidentify_template_path_outside ): with pytest.raises(exceptions.PermissionDenied) as exc: - client.delete_deidentify_template(deidentify_template_path_outside) + client.delete_deidentify_template( + request={"name": deidentify_template_path_outside} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @@ -302,59 +347,63 @@ class TestCRUDDlpJob(object): @staticmethod def test_create_dlp_job_inside(client, name_inside, inspect_job): with pytest.raises(exceptions.InvalidArgument): # no perms issue - client.create_dlp_job(name_inside, inspect_job=inspect_job) + client.create_dlp_job( + request={"parent": name_inside, "inspect_job": inspect_job} + ) @staticmethod def test_create_dlp_job_outside(client, name_outside, inspect_job): with pytest.raises(exceptions.PermissionDenied) as exc: - client.create_dlp_job(name_outside, inspect_job=inspect_job) + client.create_dlp_job( + request={"parent": name_outside, "inspect_job": inspect_job} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_list_dlp_jobs_inside(client, name_inside): - list(client.list_dlp_jobs(name_inside)) + list(client.list_dlp_jobs(request={"parent": name_inside})) @staticmethod def test_list_dlp_jobs_outside(client, name_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - list(client.list_dlp_jobs(name_outside)) + 
list(client.list_dlp_jobs(request={"parent": name_outside})) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_get_dlp_job_inside(client, job_path_inside): with pytest.raises(exceptions.InvalidArgument): # no perms issue - client.get_dlp_job(job_path_inside) + client.get_dlp_job(request={"name": job_path_inside}) @staticmethod def test_get_dlp_job_outside(client, job_path_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - client.get_dlp_job(job_path_outside) + client.get_dlp_job(request={"name": job_path_outside}) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_delete_dlp_job_inside(client, job_path_inside): with pytest.raises(exceptions.InvalidArgument): # no perms issue - client.delete_dlp_job(job_path_inside) + client.delete_dlp_job(request={"name": job_path_inside}) @staticmethod def test_delete_dlp_job_outside(client, job_path_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - client.delete_dlp_job(job_path_outside) + client.delete_dlp_job(request={"name": job_path_outside}) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_cancel_dlp_job_inside(client, job_path_inside): with pytest.raises(exceptions.InvalidArgument): # no perms issue - client.cancel_dlp_job(job_path_inside) + client.cancel_dlp_job(request={"name": job_path_inside}) @staticmethod def test_cancel_dlp_job_outside(client, job_path_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - client.cancel_dlp_job(job_path_outside) + client.cancel_dlp_job(request={"name": job_path_outside}) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @@ -362,13 +411,13 @@ def test_cancel_dlp_job_outside(client, job_path_outside): @pytest.fixture(scope="module") def job_trigger_path_inside(client): job_trigger_id = 1234567 - return client.project_job_trigger_path(vpcsc_config.project_inside, job_trigger_id) + return client.job_trigger_path(vpcsc_config.project_inside, job_trigger_id) 
@pytest.fixture(scope="module") def job_trigger_path_outside(client): job_trigger_id = 1234567 - return client.project_job_trigger_path(vpcsc_config.project_outside, job_trigger_id) + return client.job_trigger_path(vpcsc_config.project_outside, job_trigger_id) @pytest.fixture(scope="module") @@ -381,59 +430,63 @@ class TestCRUDJobTrigger(object): @staticmethod def test_create_job_trigger_inside(client, name_inside, job_trigger): with pytest.raises(exceptions.InvalidArgument): # no perms issue - client.create_job_trigger(name_inside, job_trigger) + client.create_job_trigger( + request={"parent": name_inside, "job_trigger": job_trigger} + ) @staticmethod def test_create_job_trigger_outside(client, name_outside, job_trigger): with pytest.raises(exceptions.PermissionDenied) as exc: - client.create_job_trigger(name_outside, job_trigger) + client.create_job_trigger( + request={"parent": name_outside, "job_trigger": job_trigger} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_list_job_triggers_inside(client, name_inside): - list(client.list_job_triggers(name_inside)) + list(client.list_job_triggers(request={"parent": name_inside})) @staticmethod def test_list_job_triggers_outside(client, name_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - list(client.list_job_triggers(name_outside)) + list(client.list_job_triggers(request={"parent": name_outside})) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_get_job_trigger_inside(client, job_trigger_path_inside): with pytest.raises(exceptions.NotFound): # no perms issue - client.get_job_trigger(job_trigger_path_inside) + client.get_job_trigger(request={"name": job_trigger_path_inside}) @staticmethod def test_get_job_trigger_outside(client, job_trigger_path_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - client.get_job_trigger(job_trigger_path_outside) + client.get_job_trigger(request={"name": job_trigger_path_outside}) assert 
_VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_update_job_trigger_inside(client, job_trigger_path_inside): with pytest.raises(exceptions.InvalidArgument): # no perms issue - client.update_job_trigger(job_trigger_path_inside) + client.update_job_trigger(request={"name": job_trigger_path_inside}) @staticmethod def test_update_job_trigger_outside(client, job_trigger_path_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - client.update_job_trigger(job_trigger_path_outside) + client.update_job_trigger(request={"name": job_trigger_path_outside}) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_delete_job_trigger_inside(client, job_trigger_path_inside): with pytest.raises(exceptions.NotFound): # no perms issue - client.delete_job_trigger(job_trigger_path_inside) + client.delete_job_trigger(request={"name": job_trigger_path_inside}) @staticmethod def test_delete_job_trigger_outside(client, job_trigger_path_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - client.delete_job_trigger(job_trigger_path_outside) + client.delete_job_trigger(request={"name": job_trigger_path_outside}) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @@ -441,7 +494,7 @@ def test_delete_job_trigger_outside(client, job_trigger_path_outside): @pytest.fixture(scope="module") def stored_info_type_path_inside(client): stored_info_type_id = 1234567 - return client.project_stored_info_type_path( + return client.stored_info_type_path( vpcsc_config.project_inside, stored_info_type_id ) @@ -449,7 +502,7 @@ def stored_info_type_path_inside(client): @pytest.fixture(scope="module") def stored_info_type_path_outside(client): stored_info_type_id = 1234567 - return client.project_stored_info_type_path( + return client.stored_info_type_path( vpcsc_config.project_outside, stored_info_type_id ) @@ -466,60 +519,72 @@ def test_create_stored_info_type_inside( client, name_inside, stored_info_type_config ): with 
pytest.raises(exceptions.InvalidArgument): # no perms issue - client.create_stored_info_type(name_inside, stored_info_type_config) + client.create_stored_info_type( + request={"parent": name_inside, "config": stored_info_type_config} + ) @staticmethod def test_create_stored_info_type_outside( client, name_outside, stored_info_type_config ): with pytest.raises(exceptions.PermissionDenied) as exc: - client.create_stored_info_type(name_outside, stored_info_type_config) + client.create_stored_info_type( + request={"parent": name_outside, "config": stored_info_type_config} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_list_stored_info_types_inside(client, name_inside): - list(client.list_stored_info_types(name_inside)) + list(client.list_stored_info_types(request={"parent": name_inside})) @staticmethod def test_list_stored_info_types_outside(client, name_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - list(client.list_stored_info_types(name_outside)) + list(client.list_stored_info_types(request={"parent": name_outside})) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_get_stored_info_type_inside(client, stored_info_type_path_inside): with pytest.raises(exceptions.NotFound): # no perms issue - client.get_stored_info_type(stored_info_type_path_inside) + client.get_stored_info_type(request={"name": stored_info_type_path_inside}) @staticmethod def test_get_stored_info_type_outside(client, stored_info_type_path_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - client.get_stored_info_type(stored_info_type_path_outside) + client.get_stored_info_type(request={"name": stored_info_type_path_outside}) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_update_stored_info_type_inside(client, stored_info_type_path_inside): with pytest.raises(exceptions.NotFound): # no perms issue - client.update_stored_info_type(stored_info_type_path_inside) + 
client.update_stored_info_type( + request={"name": stored_info_type_path_inside} + ) @staticmethod def test_update_stored_info_type_outside(client, stored_info_type_path_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - client.update_stored_info_type(stored_info_type_path_outside) + client.update_stored_info_type( + request={"name": stored_info_type_path_outside} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message @staticmethod def test_delete_stored_info_type_inside(client, stored_info_type_path_inside): with pytest.raises(exceptions.NotFound): # no perms issue - client.delete_stored_info_type(stored_info_type_path_inside) + client.delete_stored_info_type( + request={"name": stored_info_type_path_inside} + ) @staticmethod def test_delete_stored_info_type_outside(client, stored_info_type_path_outside): with pytest.raises(exceptions.PermissionDenied) as exc: - client.delete_stored_info_type(stored_info_type_path_outside) + client.delete_stored_info_type( + request={"name": stored_info_type_path_outside} + ) assert _VPCSC_PROHIBITED_MESSAGE in exc.value.message diff --git a/tests/unit/gapic/dlp_v2/__init__.py b/tests/unit/gapic/dlp_v2/__init__.py new file mode 100644 index 00000000..8b137891 --- /dev/null +++ b/tests/unit/gapic/dlp_v2/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/dlp_v2/test_dlp_service.py b/tests/unit/gapic/dlp_v2/test_dlp_service.py new file mode 100644 index 00000000..040a033a --- /dev/null +++ b/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -0,0 +1,7893 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient +from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.services.dlp_service import transports +from google.cloud.dlp_v2.types import dlp +from google.cloud.dlp_v2.types import storage +from google.oauth2 import service_account +from google.protobuf import any_pb2 as any # type: ignore +from google.protobuf import duration_pb2 as duration # type: ignore +from google.protobuf import field_mask_pb2 as field_mask # type: ignore +from google.protobuf import timestamp_pb2 as timestamp # type: ignore +from google.rpc import status_pb2 as status # type: ignore +from google.type import date_pb2 as date # type: ignore +from google.type import dayofweek_pb2 as dayofweek # type: ignore +from google.type import timeofday_pb2 as timeofday # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint 
will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DlpServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DlpServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + ) + assert ( + DlpServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DlpServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DlpServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert DlpServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [DlpServiceClient, DlpServiceAsyncClient]) +def test_dlp_service_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + assert client._transport._host == "dlp.googleapis.com:443" + + +def test_dlp_service_client_get_transport_class(): + transport = DlpServiceClient.get_transport_class() + assert transport == transports.DlpServiceGrpcTransport + + transport = 
DlpServiceClient.get_transport_class("grpc") + assert transport == transports.DlpServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + ( + DlpServiceAsyncClient, + transports.DlpServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient) +) +@mock.patch.object( + DlpServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DlpServiceAsyncClient), +) +def test_dlp_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DlpServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DlpServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + ( + DlpServiceAsyncClient, + transports.DlpServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_dlp_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + ( + DlpServiceAsyncClient, + transports.DlpServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_dlp_service_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + +def test_dlp_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DlpServiceClient(client_options={"api_endpoint": "squid.clam.whelk"}) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + ) + + +def test_inspect_content( + transport: str = "grpc", request_type=dlp.InspectContentRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.inspect_content), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectContentResponse() + + response = client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.InspectContentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectContentResponse) + + +def test_inspect_content_from_dict(): + test_inspect_content(request_type=dict) + + +@pytest.mark.asyncio +async def test_inspect_content_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.InspectContentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.inspect_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.InspectContentResponse() + ) + + response = await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +def test_inspect_content_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.InspectContentRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.inspect_content), "__call__") as call: + call.return_value = dlp.InspectContentResponse() + + client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_inspect_content_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.InspectContentRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.inspect_content), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.InspectContentResponse() + ) + + await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_redact_image(transport: str = "grpc", request_type=dlp.RedactImageRequest): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.redact_image), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.RedactImageResponse( + redacted_image=b"redacted_image_blob", + extracted_text="extracted_text_value", + ) + + response = client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.RedactImageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + + assert response.redacted_image == b"redacted_image_blob" + + assert response.extracted_text == "extracted_text_value" + + +def test_redact_image_from_dict(): + test_redact_image(request_type=dict) + + +@pytest.mark.asyncio +async def test_redact_image_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.RedactImageRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.redact_image), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.RedactImageResponse( + redacted_image=b"redacted_image_blob", + extracted_text="extracted_text_value", + ) + ) + + response = await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + + assert response.redacted_image == b"redacted_image_blob" + + assert response.extracted_text == "extracted_text_value" + + +def test_redact_image_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.RedactImageRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.redact_image), "__call__") as call: + call.return_value = dlp.RedactImageResponse() + + client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_redact_image_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.RedactImageRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.redact_image), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.RedactImageResponse() + ) + + await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_deidentify_content( + transport: str = "grpc", request_type=dlp.DeidentifyContentRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.deidentify_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyContentResponse() + + response = client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.DeidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +def test_deidentify_content_from_dict(): + test_deidentify_content(request_type=dict) + + +@pytest.mark.asyncio +async def test_deidentify_content_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.DeidentifyContentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.deidentify_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyContentResponse() + ) + + response = await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyContentResponse) + + +def test_deidentify_content_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeidentifyContentRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.deidentify_content), "__call__" + ) as call: + call.return_value = dlp.DeidentifyContentResponse() + + client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_deidentify_content_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeidentifyContentRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.deidentify_content), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyContentResponse() + ) + + await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_reidentify_content( + transport: str = "grpc", request_type=dlp.ReidentifyContentRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.reidentify_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ReidentifyContentResponse() + + response = client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.ReidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +def test_reidentify_content_from_dict(): + test_reidentify_content(request_type=dict) + + +@pytest.mark.asyncio +async def test_reidentify_content_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.ReidentifyContentRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.reidentify_content), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ReidentifyContentResponse() + ) + + response = await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +def test_reidentify_content_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ReidentifyContentRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.reidentify_content), "__call__" + ) as call: + call.return_value = dlp.ReidentifyContentResponse() + + client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_reidentify_content_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ReidentifyContentRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.reidentify_content), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ReidentifyContentResponse() + ) + + await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_info_types( + transport: str = "grpc", request_type=dlp.ListInfoTypesRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_info_types), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse() + + response = client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.ListInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +def test_list_info_types_from_dict(): + test_list_info_types(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_info_types_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = dlp.ListInfoTypesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_info_types), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListInfoTypesResponse() + ) + + response = await client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +def test_list_info_types_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_info_types), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_info_types(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_info_types_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_info_types( + dlp.ListInfoTypesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_info_types_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_info_types), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListInfoTypesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_info_types(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_info_types_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_info_types( + dlp.ListInfoTypesRequest(), parent="parent_value", + ) + + +def test_create_inspect_template( + transport: str = "grpc", request_type=dlp.CreateInspectTemplateRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.create_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + response = client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.CreateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + +def test_create_inspect_template_from_dict(): + test_create_inspect_template(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_inspect_template_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.CreateInspectTemplateRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + + response = await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + +def test_create_inspect_template_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_inspect_template), "__call__" + ) as call: + call.return_value = dlp.InspectTemplate() + + client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.create_inspect_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + + await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_inspect_template_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_inspect_template( + parent="parent_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].inspect_template == dlp.InspectTemplate(name="name_value") + + +def test_create_inspect_template_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent="parent_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_inspect_template( + parent="parent_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].inspect_template == dlp.InspectTemplate(name="name_value") + + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent="parent_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + ) + + +def test_update_inspect_template( + transport: str = "grpc", request_type=dlp.UpdateInspectTemplateRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + response = client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.UpdateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + +def test_update_inspect_template_from_dict(): + test_update_inspect_template(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_inspect_template_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = dlp.UpdateInspectTemplateRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + + response = await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + +def test_update_inspect_template_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_inspect_template), "__call__" + ) as call: + call.return_value = dlp.InspectTemplate() + + client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_inspect_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + + await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_inspect_template_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_inspect_template( + name="name_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].inspect_template == dlp.InspectTemplate(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_inspect_template_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name="name_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_inspect_template_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_inspect_template( + name="name_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].inspect_template == dlp.InspectTemplate(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_inspect_template( + dlp.UpdateInspectTemplateRequest(), + name="name_value", + inspect_template=dlp.InspectTemplate(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_get_inspect_template( + transport: str = "grpc", request_type=dlp.GetInspectTemplateRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + response = client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.GetInspectTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + +def test_get_inspect_template_from_dict(): + test_get_inspect_template(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_inspect_template_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.GetInspectTemplateRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.InspectTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + + response = await client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + +def test_get_inspect_template_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetInspectTemplateRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.get_inspect_template), "__call__" + ) as call: + call.return_value = dlp.InspectTemplate() + + client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetInspectTemplateRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_inspect_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + + await client.get_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_inspect_template_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_inspect_template(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_inspect_template_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_inspect_template( + dlp.GetInspectTemplateRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_inspect_template_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_inspect_template(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_inspect_template( + dlp.GetInspectTemplateRequest(), name="name_value", + ) + + +def test_list_inspect_templates( + transport: str = "grpc", request_type=dlp.ListInspectTemplatesRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_inspect_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.ListInspectTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInspectTemplatesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_inspect_templates_from_dict(): + test_list_inspect_templates(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_inspect_templates_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.ListInspectTemplatesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_inspect_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListInspectTemplatesResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListInspectTemplatesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_inspect_templates_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListInspectTemplatesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_inspect_templates), "__call__" + ) as call: + call.return_value = dlp.ListInspectTemplatesResponse() + + client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_inspect_templates_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.ListInspectTemplatesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_inspect_templates), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListInspectTemplatesResponse() + ) + + await client.list_inspect_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_inspect_templates_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_inspect_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_inspect_templates(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_inspect_templates_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_inspect_templates_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_inspect_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInspectTemplatesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListInspectTemplatesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_inspect_templates(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_inspect_templates_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_inspect_templates( + dlp.ListInspectTemplatesRequest(), parent="parent_value", + ) + + +def test_list_inspect_templates_pager(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_inspect_templates), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token="abc", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], next_page_token="def", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[dlp.InspectTemplate(),], next_page_token="ghi", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[dlp.InspectTemplate(), dlp.InspectTemplate(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_inspect_templates(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, dlp.InspectTemplate) for i in results) + + +def test_list_inspect_templates_pages(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_inspect_templates), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token="abc", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], next_page_token="def", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[dlp.InspectTemplate(),], next_page_token="ghi", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[dlp.InspectTemplate(), dlp.InspectTemplate(),], + ), + RuntimeError, + ) + pages = list(client.list_inspect_templates(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_inspect_templates_async_pager(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_inspect_templates), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token="abc", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], next_page_token="def", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[dlp.InspectTemplate(),], next_page_token="ghi", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[dlp.InspectTemplate(), dlp.InspectTemplate(),], + ), + RuntimeError, + ) + async_pager = await client.list_inspect_templates(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.InspectTemplate) for i in responses) + + +@pytest.mark.asyncio +async def test_list_inspect_templates_async_pages(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_inspect_templates), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListInspectTemplatesResponse( + inspect_templates=[ + dlp.InspectTemplate(), + dlp.InspectTemplate(), + dlp.InspectTemplate(), + ], + next_page_token="abc", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[], next_page_token="def", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[dlp.InspectTemplate(),], next_page_token="ghi", + ), + dlp.ListInspectTemplatesResponse( + inspect_templates=[dlp.InspectTemplate(), dlp.InspectTemplate(),], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_inspect_templates(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_delete_inspect_template( + transport: str = "grpc", request_type=dlp.DeleteInspectTemplateRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.DeleteInspectTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_inspect_template_from_dict(): + test_delete_inspect_template(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_inspect_template_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.DeleteInspectTemplateRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_inspect_template_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteInspectTemplateRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_inspect_template), "__call__" + ) as call: + call.return_value = None + + client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteInspectTemplateRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_inspect_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_inspect_template_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_inspect_template(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_inspect_template_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_inspect_template_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_inspect_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_inspect_template(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_inspect_template( + dlp.DeleteInspectTemplateRequest(), name="name_value", + ) + + +def test_create_deidentify_template( + transport: str = "grpc", request_type=dlp.CreateDeidentifyTemplateRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + response = client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.CreateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + +def test_create_deidentify_template_from_dict(): + test_create_deidentify_template(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_deidentify_template_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = dlp.CreateDeidentifyTemplateRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + + response = await client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + +def test_create_deidentify_template_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDeidentifyTemplateRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_deidentify_template), "__call__" + ) as call: + call.return_value = dlp.DeidentifyTemplate() + + client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDeidentifyTemplateRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_deidentify_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate() + ) + + await client.create_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_deidentify_template_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_deidentify_template( + parent="parent_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].deidentify_template == dlp.DeidentifyTemplate(name="name_value") + + +def test_create_deidentify_template_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent="parent_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_deidentify_template( + parent="parent_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].deidentify_template == dlp.DeidentifyTemplate(name="name_value") + + +@pytest.mark.asyncio +async def test_create_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent="parent_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + ) + + +def test_update_deidentify_template( + transport: str = "grpc", request_type=dlp.UpdateDeidentifyTemplateRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + response = client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyTemplate) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + +def test_update_deidentify_template_from_dict(): + test_update_deidentify_template(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_deidentify_template_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.UpdateDeidentifyTemplateRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + + response = await client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + +def test_update_deidentify_template_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.UpdateDeidentifyTemplateRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_deidentify_template), "__call__" + ) as call: + call.return_value = dlp.DeidentifyTemplate() + + client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateDeidentifyTemplateRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_deidentify_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate() + ) + + await client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_deidentify_template_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.update_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_deidentify_template( + name="name_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].deidentify_template == dlp.DeidentifyTemplate(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_deidentify_template_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name="name_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_deidentify_template( + name="name_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].deidentify_template == dlp.DeidentifyTemplate(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_deidentify_template( + dlp.UpdateDeidentifyTemplateRequest(), + name="name_value", + deidentify_template=dlp.DeidentifyTemplate(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_get_deidentify_template( + transport: str = "grpc", request_type=dlp.GetDeidentifyTemplateRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + + response = client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.GetDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + +def test_get_deidentify_template_from_dict(): + test_get_deidentify_template(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_deidentify_template_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.GetDeidentifyTemplateRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate( + name="name_value", + display_name="display_name_value", + description="description_value", + ) + ) + + response = await client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyTemplate) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + +def test_get_deidentify_template_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDeidentifyTemplateRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_deidentify_template), "__call__" + ) as call: + call.return_value = dlp.DeidentifyTemplate() + + client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDeidentifyTemplateRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_deidentify_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate() + ) + + await client.get_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_deidentify_template_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_deidentify_template(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_deidentify_template_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.DeidentifyTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DeidentifyTemplate() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_deidentify_template(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_deidentify_template( + dlp.GetDeidentifyTemplateRequest(), name="name_value", + ) + + +def test_list_deidentify_templates( + transport: str = "grpc", request_type=dlp.ListDeidentifyTemplatesRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_deidentify_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.ListDeidentifyTemplatesRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDeidentifyTemplatesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_deidentify_templates_from_dict(): + test_list_deidentify_templates(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.ListDeidentifyTemplatesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_deidentify_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListDeidentifyTemplatesResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_deidentify_templates_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.list_deidentify_templates), "__call__" + ) as call: + call.return_value = dlp.ListDeidentifyTemplatesResponse() + + client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_deidentify_templates), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListDeidentifyTemplatesResponse() + ) + + await client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_deidentify_templates_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_deidentify_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.ListDeidentifyTemplatesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_deidentify_templates(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_deidentify_templates_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_deidentify_templates), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListDeidentifyTemplatesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_deidentify_templates(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_deidentify_templates( + dlp.ListDeidentifyTemplatesRequest(), parent="parent_value", + ) + + +def test_list_deidentify_templates_pager(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_deidentify_templates), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token="abc", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], next_page_token="def", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[dlp.DeidentifyTemplate(),], next_page_token="ghi", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_deidentify_templates(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) for i in results) + + +def test_list_deidentify_templates_pages(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.list_deidentify_templates), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token="abc", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], next_page_token="def", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[dlp.DeidentifyTemplate(),], next_page_token="ghi", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + pages = list(client.list_deidentify_templates(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_pager(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_deidentify_templates), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token="abc", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], next_page_token="def", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[dlp.DeidentifyTemplate(),], next_page_token="ghi", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_deidentify_templates(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.DeidentifyTemplate) for i in responses) + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_pages(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_deidentify_templates), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token="abc", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], next_page_token="def", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[dlp.DeidentifyTemplate(),], next_page_token="ghi", + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_deidentify_templates(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_delete_deidentify_template( + transport: str = "grpc", request_type=dlp.DeleteDeidentifyTemplateRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_deidentify_template_from_dict(): + test_delete_deidentify_template(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.DeleteDeidentifyTemplateRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_deidentify_template_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_deidentify_template), "__call__" + ) as call: + call.return_value = None + + client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_deidentify_template), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_deidentify_template_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_deidentify_template(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_deidentify_template_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_deidentify_template), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_deidentify_template(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), name="name_value", + ) + + +def test_create_job_trigger( + transport: str = "grpc", request_type=dlp.CreateJobTriggerRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + ) + + response = client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.CreateJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_create_job_trigger_from_dict(): + test_create_job_trigger(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_job_trigger_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.CreateJobTriggerRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + ) + ) + + response = await client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_create_job_trigger_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.CreateJobTriggerRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_job_trigger), "__call__" + ) as call: + call.return_value = dlp.JobTrigger() + + client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateJobTriggerRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_job_trigger), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + + await client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_job_trigger_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.JobTrigger() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job_trigger( + parent="parent_value", job_trigger=dlp.JobTrigger(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].job_trigger == dlp.JobTrigger(name="name_value") + + +def test_create_job_trigger_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent="parent_value", + job_trigger=dlp.JobTrigger(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_job_trigger( + parent="parent_value", job_trigger=dlp.JobTrigger(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].job_trigger == dlp.JobTrigger(name="name_value") + + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent="parent_value", + job_trigger=dlp.JobTrigger(name="name_value"), + ) + + +def test_update_job_trigger( + transport: str = "grpc", request_type=dlp.UpdateJobTriggerRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + ) + + response = client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.UpdateJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_update_job_trigger_from_dict(): + test_update_job_trigger(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_job_trigger_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.UpdateJobTriggerRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + ) + ) + + response = await client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_update_job_trigger_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.UpdateJobTriggerRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_job_trigger), "__call__" + ) as call: + call.return_value = dlp.JobTrigger() + + client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateJobTriggerRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_job_trigger), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + + await client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_job_trigger_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.JobTrigger() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_job_trigger( + name="name_value", + job_trigger=dlp.JobTrigger(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].job_trigger == dlp.JobTrigger(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_job_trigger_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name="name_value", + job_trigger=dlp.JobTrigger(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_job_trigger( + name="name_value", + job_trigger=dlp.JobTrigger(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].job_trigger == dlp.JobTrigger(name="name_value") + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name="name_value", + job_trigger=dlp.JobTrigger(name="name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_hybrid_inspect_job_trigger( + transport: str = "grpc", request_type=dlp.HybridInspectJobTriggerRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.hybrid_inspect_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + + response = client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.HybridInspectJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_job_trigger_from_dict(): + test_hybrid_inspect_job_trigger(request_type=dict) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.HybridInspectJobTriggerRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.hybrid_inspect_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.HybridInspectResponse() + ) + + response = await client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_job_trigger_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectJobTriggerRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._transport.hybrid_inspect_job_trigger), "__call__" + ) as call: + call.return_value = dlp.HybridInspectResponse() + + client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectJobTriggerRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.hybrid_inspect_job_trigger), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.HybridInspectResponse() + ) + + await client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_hybrid_inspect_job_trigger_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.hybrid_inspect_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.HybridInspectResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.hybrid_inspect_job_trigger(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_hybrid_inspect_job_trigger_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.hybrid_inspect_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.HybridInspectResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.hybrid_inspect_job_trigger(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), name="name_value", + ) + + +def test_get_job_trigger( + transport: str = "grpc", request_type=dlp.GetJobTriggerRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_job_trigger), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + ) + + response = client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.GetJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_get_job_trigger_from_dict(): + test_get_job_trigger(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_job_trigger_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.GetJobTriggerRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.JobTrigger( + name="name_value", + display_name="display_name_value", + description="description_value", + status=dlp.JobTrigger.Status.HEALTHY, + ) + ) + + response = await client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.description == "description_value" + + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_get_job_trigger_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.GetJobTriggerRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_job_trigger), "__call__") as call: + call.return_value = dlp.JobTrigger() + + client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetJobTriggerRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_job_trigger), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + + await client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_job_trigger_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_job_trigger), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.JobTrigger() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job_trigger(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_job_trigger_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job_trigger( + dlp.GetJobTriggerRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_job_trigger(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_job_trigger( + dlp.GetJobTriggerRequest(), name="name_value", + ) + + +def test_list_job_triggers( + transport: str = "grpc", request_type=dlp.ListJobTriggersRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_job_triggers), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.ListJobTriggersRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_job_triggers_from_dict(): + test_list_job_triggers(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_job_triggers_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.ListJobTriggersRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_job_triggers), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListJobTriggersResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTriggersAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_job_triggers_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListJobTriggersRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_job_triggers), "__call__" + ) as call: + call.return_value = dlp.ListJobTriggersResponse() + + client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_job_triggers_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListJobTriggersRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_job_triggers), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListJobTriggersResponse() + ) + + await client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_job_triggers_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_job_triggers), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_job_triggers(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_job_triggers_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_triggers( + dlp.ListJobTriggersRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_job_triggers_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_job_triggers), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListJobTriggersResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_job_triggers(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_job_triggers_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_job_triggers( + dlp.ListJobTriggersRequest(), parent="parent_value", + ) + + +def test_list_job_triggers_pager(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_job_triggers), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[dlp.JobTrigger(), dlp.JobTrigger(), dlp.JobTrigger(),], + next_page_token="abc", + ), + dlp.ListJobTriggersResponse(job_triggers=[], next_page_token="def",), + dlp.ListJobTriggersResponse( + job_triggers=[dlp.JobTrigger(),], next_page_token="ghi", + ), + dlp.ListJobTriggersResponse( + job_triggers=[dlp.JobTrigger(), dlp.JobTrigger(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_job_triggers(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, dlp.JobTrigger) for i in results) + + +def test_list_job_triggers_pages(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_job_triggers), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[dlp.JobTrigger(), dlp.JobTrigger(), dlp.JobTrigger(),], + next_page_token="abc", + ), + dlp.ListJobTriggersResponse(job_triggers=[], next_page_token="def",), + dlp.ListJobTriggersResponse( + job_triggers=[dlp.JobTrigger(),], next_page_token="ghi", + ), + dlp.ListJobTriggersResponse( + job_triggers=[dlp.JobTrigger(), dlp.JobTrigger(),], + ), + RuntimeError, + ) + pages = list(client.list_job_triggers(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_job_triggers_async_pager(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_job_triggers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[dlp.JobTrigger(), dlp.JobTrigger(), dlp.JobTrigger(),], + next_page_token="abc", + ), + dlp.ListJobTriggersResponse(job_triggers=[], next_page_token="def",), + dlp.ListJobTriggersResponse( + job_triggers=[dlp.JobTrigger(),], next_page_token="ghi", + ), + dlp.ListJobTriggersResponse( + job_triggers=[dlp.JobTrigger(), dlp.JobTrigger(),], + ), + RuntimeError, + ) + async_pager = await client.list_job_triggers(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.JobTrigger) for i in responses) + + +@pytest.mark.asyncio +async def test_list_job_triggers_async_pages(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_job_triggers), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListJobTriggersResponse( + job_triggers=[dlp.JobTrigger(), dlp.JobTrigger(), dlp.JobTrigger(),], + next_page_token="abc", + ), + dlp.ListJobTriggersResponse(job_triggers=[], next_page_token="def",), + dlp.ListJobTriggersResponse( + job_triggers=[dlp.JobTrigger(),], next_page_token="ghi", + ), + dlp.ListJobTriggersResponse( + job_triggers=[dlp.JobTrigger(), dlp.JobTrigger(),], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_job_triggers(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_delete_job_trigger( + transport: str = "grpc", request_type=dlp.DeleteJobTriggerRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.DeleteJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_job_trigger_from_dict(): + test_delete_job_trigger(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_job_trigger_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.DeleteJobTriggerRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_trigger_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_job_trigger), "__call__" + ) as call: + call.return_value = None + + client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_job_trigger), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_job_trigger_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_job_trigger(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_job_trigger_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_job_trigger_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_job_trigger(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_job_trigger( + dlp.DeleteJobTriggerRequest(), name="name_value", + ) + + +def test_activate_job_trigger( + transport: str = "grpc", request_type=dlp.ActivateJobTriggerRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.activate_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name="name_value", + type=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + risk_details=dlp.AnalyzeDataSourceRiskDetails( + requested_privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + response = client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.ActivateJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + + assert response.name == "name_value" + + assert response.type == dlp.DlpJobType.INSPECT_JOB + + assert response.state == dlp.DlpJob.JobState.PENDING + + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_activate_job_trigger_from_dict(): + test_activate_job_trigger(request_type=dict) + + +@pytest.mark.asyncio +async def test_activate_job_trigger_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.ActivateJobTriggerRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.activate_job_trigger), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DlpJob( + name="name_value", + type=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + ) + ) + + response = await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + + assert response.name == "name_value" + + assert response.type == dlp.DlpJobType.INSPECT_JOB + + assert response.state == dlp.DlpJob.JobState.PENDING + + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_activate_job_trigger_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = dlp.ActivateJobTriggerRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.activate_job_trigger), "__call__" + ) as call: + call.return_value = dlp.DlpJob() + + client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_activate_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ActivateJobTriggerRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.activate_job_trigger), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + + await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_create_dlp_job(transport: str = "grpc", request_type=dlp.CreateDlpJobRequest): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name="name_value", + type=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + risk_details=dlp.AnalyzeDataSourceRiskDetails( + requested_privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + response = client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.CreateDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + + assert response.name == "name_value" + + assert response.type == dlp.DlpJobType.INSPECT_JOB + + assert response.state == dlp.DlpJob.JobState.PENDING + + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_create_dlp_job_from_dict(): + test_create_dlp_job(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_dlp_job_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.CreateDlpJobRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_dlp_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DlpJob( + name="name_value", + type=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + ) + ) + + response = await client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + + assert response.name == "name_value" + + assert response.type == dlp.DlpJobType.INSPECT_JOB + + assert response.state == dlp.DlpJob.JobState.PENDING + + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_create_dlp_job_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateDlpJobRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_dlp_job), "__call__") as call: + call.return_value = dlp.DlpJob() + + client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.CreateDlpJobRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_dlp_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + + await client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_dlp_job_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.create_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_dlp_job( + parent="parent_value", + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + risk_job=dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].risk_job == dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ) + + +def test_create_dlp_job_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent="parent_value", + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + risk_job=dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + +@pytest.mark.asyncio +async def test_create_dlp_job_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_dlp_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_dlp_job( + parent="parent_value", + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + risk_job=dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].risk_job == dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ) + + +@pytest.mark.asyncio +async def test_create_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent="parent_value", + inspect_job=dlp.InspectJobConfig( + storage_config=storage.StorageConfig( + datastore_options=storage.DatastoreOptions( + partition_id=storage.PartitionId(project_id="project_id_value") + ) + ) + ), + risk_job=dlp.RiskAnalysisJobConfig( + privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + +def test_list_dlp_jobs(transport: str = "grpc", request_type=dlp.ListDlpJobsRequest): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_dlp_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.ListDlpJobsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDlpJobsPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_dlp_jobs_from_dict(): + test_list_dlp_jobs(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.ListDlpJobsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_dlp_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListDlpJobsResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDlpJobsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_dlp_jobs_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDlpJobsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_dlp_jobs), "__call__") as call: + call.return_value = dlp.ListDlpJobsResponse() + + client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDlpJobsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_dlp_jobs), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListDlpJobsResponse() + ) + + await client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_dlp_jobs_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_dlp_jobs), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_dlp_jobs(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_dlp_jobs_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_dlp_jobs), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListDlpJobsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_dlp_jobs(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_dlp_jobs( + dlp.ListDlpJobsRequest(), parent="parent_value", + ) + + +def test_list_dlp_jobs_pager(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client._transport.list_dlp_jobs), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[dlp.DlpJob(), dlp.DlpJob(), dlp.DlpJob(),], next_page_token="abc", + ), + dlp.ListDlpJobsResponse(jobs=[], next_page_token="def",), + dlp.ListDlpJobsResponse(jobs=[dlp.DlpJob(),], next_page_token="ghi",), + dlp.ListDlpJobsResponse(jobs=[dlp.DlpJob(), dlp.DlpJob(),],), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_dlp_jobs(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, dlp.DlpJob) for i in results) + + +def test_list_dlp_jobs_pages(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_dlp_jobs), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[dlp.DlpJob(), dlp.DlpJob(), dlp.DlpJob(),], next_page_token="abc", + ), + dlp.ListDlpJobsResponse(jobs=[], next_page_token="def",), + dlp.ListDlpJobsResponse(jobs=[dlp.DlpJob(),], next_page_token="ghi",), + dlp.ListDlpJobsResponse(jobs=[dlp.DlpJob(), dlp.DlpJob(),],), + RuntimeError, + ) + pages = list(client.list_dlp_jobs(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_pager(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_dlp_jobs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[dlp.DlpJob(), dlp.DlpJob(), dlp.DlpJob(),], next_page_token="abc", + ), + dlp.ListDlpJobsResponse(jobs=[], next_page_token="def",), + dlp.ListDlpJobsResponse(jobs=[dlp.DlpJob(),], next_page_token="ghi",), + dlp.ListDlpJobsResponse(jobs=[dlp.DlpJob(), dlp.DlpJob(),],), + RuntimeError, + ) + async_pager = await client.list_dlp_jobs(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.DlpJob) for i in responses) + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async_pages(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_dlp_jobs), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListDlpJobsResponse( + jobs=[dlp.DlpJob(), dlp.DlpJob(), dlp.DlpJob(),], next_page_token="abc", + ), + dlp.ListDlpJobsResponse(jobs=[], next_page_token="def",), + dlp.ListDlpJobsResponse(jobs=[dlp.DlpJob(),], next_page_token="ghi",), + dlp.ListDlpJobsResponse(jobs=[dlp.DlpJob(), dlp.DlpJob(),],), + RuntimeError, + ) + pages = [] + async for page in (await client.list_dlp_jobs(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_get_dlp_job(transport: str = "grpc", request_type=dlp.GetDlpJobRequest): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name="name_value", + type=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + risk_details=dlp.AnalyzeDataSourceRiskDetails( + requested_privacy_metric=dlp.PrivacyMetric( + numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig( + field=storage.FieldId(name="name_value") + ) + ) + ), + ) + + response = client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.GetDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DlpJob) + + assert response.name == "name_value" + + assert response.type == dlp.DlpJobType.INSPECT_JOB + + assert response.state == dlp.DlpJob.JobState.PENDING + + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_get_dlp_job_from_dict(): + test_get_dlp_job(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_dlp_job_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.GetDlpJobRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_dlp_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.DlpJob( + name="name_value", + type=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name="job_trigger_name_value", + ) + ) + + response = await client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + + assert response.name == "name_value" + + assert response.type == dlp.DlpJobType.INSPECT_JOB + + assert response.state == dlp.DlpJob.JobState.PENDING + + assert response.job_trigger_name == "job_trigger_name_value" + + +def test_get_dlp_job_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.GetDlpJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_dlp_job), "__call__") as call: + call.return_value = dlp.DlpJob() + + client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetDlpJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_dlp_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + + await client.get_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_dlp_job_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = dlp.DlpJob() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_dlp_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_dlp_job_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_dlp_job( + dlp.GetDlpJobRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_dlp_job_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_dlp_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_dlp_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_dlp_job( + dlp.GetDlpJobRequest(), name="name_value", + ) + + +def test_delete_dlp_job(transport: str = "grpc", request_type=dlp.DeleteDlpJobRequest): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.DeleteDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dlp_job_from_dict(): + test_delete_dlp_job(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_dlp_job_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.DeleteDlpJobRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_dlp_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dlp_job_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDlpJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_dlp_job), "__call__") as call: + call.return_value = None + + client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDlpJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_dlp_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_dlp_job_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.delete_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_dlp_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_dlp_job_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_dlp_job_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_dlp_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_dlp_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), name="name_value", + ) + + +def test_cancel_dlp_job(transport: str = "grpc", request_type=dlp.CancelDlpJobRequest): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.cancel_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.CancelDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_dlp_job_from_dict(): + test_cancel_dlp_job(request_type=dict) + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.CancelDlpJobRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.cancel_dlp_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_dlp_job_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CancelDlpJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.cancel_dlp_job), "__call__") as call: + call.return_value = None + + client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CancelDlpJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.cancel_dlp_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_create_stored_info_type( + transport: str = "grpc", request_type=dlp.CreateStoredInfoTypeRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType(name="name_value",) + + response = client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.CreateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + + assert response.name == "name_value" + + +def test_create_stored_info_type_from_dict(): + test_create_stored_info_type(request_type=dict) + + +@pytest.mark.asyncio +async def test_create_stored_info_type_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.CreateStoredInfoTypeRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.StoredInfoType(name="name_value",) + ) + + response = await client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + + assert response.name == "name_value" + + +def test_create_stored_info_type_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateStoredInfoTypeRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_stored_info_type), "__call__" + ) as call: + call.return_value = dlp.StoredInfoType() + + client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateStoredInfoTypeRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_stored_info_type), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + + await client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_create_stored_info_type_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_stored_info_type( + parent="parent_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].config == dlp.StoredInfoTypeConfig( + display_name="display_name_value" + ) + + +def test_create_stored_info_type_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent="parent_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_stored_info_type( + parent="parent_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + assert args[0].config == dlp.StoredInfoTypeConfig( + display_name="display_name_value" + ) + + +@pytest.mark.asyncio +async def test_create_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_stored_info_type( + dlp.CreateStoredInfoTypeRequest(), + parent="parent_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + ) + + +def test_update_stored_info_type( + transport: str = "grpc", request_type=dlp.UpdateStoredInfoTypeRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType(name="name_value",) + + response = client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.UpdateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + + assert response.name == "name_value" + + +def test_update_stored_info_type_from_dict(): + test_update_stored_info_type(request_type=dict) + + +@pytest.mark.asyncio +async def test_update_stored_info_type_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.UpdateStoredInfoTypeRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.StoredInfoType(name="name_value",) + ) + + response = await client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + + assert response.name == "name_value" + + +def test_update_stored_info_type_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateStoredInfoTypeRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_stored_info_type), "__call__" + ) as call: + call.return_value = dlp.StoredInfoType() + + client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateStoredInfoTypeRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_stored_info_type), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + + await client.update_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_stored_info_type_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.update_stored_info_type( + name="name_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].config == dlp.StoredInfoTypeConfig( + display_name="display_name_value" + ) + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +def test_update_stored_info_type_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name="name_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_stored_info_type( + name="name_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + assert args[0].config == dlp.StoredInfoTypeConfig( + display_name="display_name_value" + ) + + assert args[0].update_mask == field_mask.FieldMask(paths=["paths_value"]) + + +@pytest.mark.asyncio +async def test_update_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name="name_value", + config=dlp.StoredInfoTypeConfig(display_name="display_name_value"), + update_mask=field_mask.FieldMask(paths=["paths_value"]), + ) + + +def test_get_stored_info_type( + transport: str = "grpc", request_type=dlp.GetStoredInfoTypeRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType(name="name_value",) + + response = client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.GetStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.StoredInfoType) + + assert response.name == "name_value" + + +def test_get_stored_info_type_from_dict(): + test_get_stored_info_type(request_type=dict) + + +@pytest.mark.asyncio +async def test_get_stored_info_type_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.GetStoredInfoTypeRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.StoredInfoType(name="name_value",) + ) + + response = await client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + + assert response.name == "name_value" + + +def test_get_stored_info_type_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetStoredInfoTypeRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_stored_info_type), "__call__" + ) as call: + call.return_value = dlp.StoredInfoType() + + client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetStoredInfoTypeRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_stored_info_type), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + + await client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_get_stored_info_type_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.get_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_stored_info_type(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_get_stored_info_type_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_stored_info_type(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_get_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_stored_info_type( + dlp.GetStoredInfoTypeRequest(), name="name_value", + ) + + +def test_list_stored_info_types( + transport: str = "grpc", request_type=dlp.ListStoredInfoTypesRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_stored_info_types), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.ListStoredInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_stored_info_types_from_dict(): + test_list_stored_info_types(request_type=dict) + + +@pytest.mark.asyncio +async def test_list_stored_info_types_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.ListStoredInfoTypesRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.list_stored_info_types), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListStoredInfoTypesResponse(next_page_token="next_page_token_value",) + ) + + response = await client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_stored_info_types_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListStoredInfoTypesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_stored_info_types), "__call__" + ) as call: + call.return_value = dlp.ListStoredInfoTypesResponse() + + client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_stored_info_types_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.ListStoredInfoTypesRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_stored_info_types), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListStoredInfoTypesResponse() + ) + + await client.list_stored_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_stored_info_types_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_stored_info_types), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_stored_info_types(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +def test_list_stored_info_types_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_stored_info_types( + dlp.ListStoredInfoTypesRequest(), parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_stored_info_types_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_stored_info_types), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListStoredInfoTypesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.ListStoredInfoTypesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_stored_info_types(parent="parent_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].parent == "parent_value" + + +@pytest.mark.asyncio +async def test_list_stored_info_types_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_stored_info_types( + dlp.ListStoredInfoTypesRequest(), parent="parent_value", + ) + + +def test_list_stored_info_types_pager(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_stored_info_types), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token="abc", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], next_page_token="def", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[dlp.StoredInfoType(),], next_page_token="ghi", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[dlp.StoredInfoType(), dlp.StoredInfoType(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_stored_info_types(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, dlp.StoredInfoType) for i in results) + + +def test_list_stored_info_types_pages(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.list_stored_info_types), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token="abc", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], next_page_token="def", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[dlp.StoredInfoType(),], next_page_token="ghi", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[dlp.StoredInfoType(), dlp.StoredInfoType(),], + ), + RuntimeError, + ) + pages = list(client.list_stored_info_types(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_pager(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_stored_info_types), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token="abc", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], next_page_token="def", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[dlp.StoredInfoType(),], next_page_token="ghi", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[dlp.StoredInfoType(), dlp.StoredInfoType(),], + ), + RuntimeError, + ) + async_pager = await client.list_stored_info_types(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dlp.StoredInfoType) for i in responses) + + +@pytest.mark.asyncio +async def test_list_stored_info_types_async_pages(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_stored_info_types), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token="abc", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], next_page_token="def", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[dlp.StoredInfoType(),], next_page_token="ghi", + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[dlp.StoredInfoType(), dlp.StoredInfoType(),], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_stored_info_types(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_delete_stored_info_type( + transport: str = "grpc", request_type=dlp.DeleteStoredInfoTypeRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_stored_info_type_from_dict(): + test_delete_stored_info_type(request_type=dict) + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.DeleteStoredInfoTypeRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_stored_info_type_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteStoredInfoTypeRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_stored_info_type), "__call__" + ) as call: + call.return_value = None + + client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteStoredInfoTypeRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_stored_info_type), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_stored_info_type_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_stored_info_type(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_delete_stored_info_type_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_stored_info_type), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_stored_info_type(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), name="name_value", + ) + + +def test_hybrid_inspect_dlp_job( + transport: str = "grpc", request_type=dlp.HybridInspectDlpJobRequest +): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.hybrid_inspect_dlp_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + + response = client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.HybridInspectDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_dlp_job_from_dict(): + test_hybrid_inspect_dlp_job(request_type=dict) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.HybridInspectDlpJobRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.hybrid_inspect_dlp_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.HybridInspectResponse() + ) + + response = await client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_dlp_job_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectDlpJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.hybrid_inspect_dlp_job), "__call__" + ) as call: + call.return_value = dlp.HybridInspectResponse() + + client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.HybridInspectDlpJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.hybrid_inspect_dlp_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.HybridInspectResponse() + ) + + await client.hybrid_inspect_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_hybrid_inspect_dlp_job_flattened(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.hybrid_inspect_dlp_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.hybrid_inspect_dlp_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +def test_hybrid_inspect_dlp_job_flattened_error(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), name="name_value", + ) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_flattened_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client._client._transport.hybrid_inspect_dlp_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dlp.HybridInspectResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.hybrid_inspect_dlp_job(name="name_value",) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0].name == "name_value" + + +@pytest.mark.asyncio +async def test_hybrid_inspect_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.hybrid_inspect_dlp_job( + dlp.HybridInspectDlpJobRequest(), name="name_value", + ) + + +def test_finish_dlp_job(transport: str = "grpc", request_type=dlp.FinishDlpJobRequest): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.finish_dlp_job), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == dlp.FinishDlpJobRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_finish_dlp_job_from_dict(): + test_finish_dlp_job(request_type=dict) + + +@pytest.mark.asyncio +async def test_finish_dlp_job_async(transport: str = "grpc_asyncio"): + client = DlpServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dlp.FinishDlpJobRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.finish_dlp_job), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_finish_dlp_job_field_headers(): + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.finish_dlp_job), "__call__") as call: + call.return_value = None + + client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_finish_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.finish_dlp_job), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.DlpServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = DlpServiceClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DlpServiceGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DlpServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.DlpServiceGrpcTransport,) + + +def test_dlp_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.DlpServiceTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_dlp_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DlpServiceTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "inspect_content", + "redact_image", + "deidentify_content", + "reidentify_content", + "list_info_types", + "create_inspect_template", + "update_inspect_template", + "get_inspect_template", + "list_inspect_templates", + "delete_inspect_template", + "create_deidentify_template", + "update_deidentify_template", + "get_deidentify_template", + "list_deidentify_templates", + "delete_deidentify_template", + "create_job_trigger", + "update_job_trigger", + "hybrid_inspect_job_trigger", + "get_job_trigger", + "list_job_triggers", + "delete_job_trigger", + "activate_job_trigger", + "create_dlp_job", + "list_dlp_jobs", + "get_dlp_job", + "delete_dlp_job", + "cancel_dlp_job", + "create_stored_info_type", + "update_stored_info_type", + "get_stored_info_type", + "list_stored_info_types", + "delete_stored_info_type", + "hybrid_inspect_dlp_job", + "finish_dlp_job", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +def test_dlp_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_dlp_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
+ with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + DlpServiceClient() + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +def test_dlp_service_transport_auth_adc(): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.DlpServiceGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_dlp_service_host_no_port(): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint="dlp.googleapis.com"), + ) + assert client._transport._host == "dlp.googleapis.com:443" + + +def test_dlp_service_host_with_port(): + client = DlpServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="dlp.googleapis.com:8000" + ), + ) + assert client._transport._host == "dlp.googleapis.com:8000" + + +def test_dlp_service_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. 
+ callback = mock.MagicMock() + transport = transports.DlpServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +def test_dlp_service_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.DlpServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_dlp_service_grpc_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.DlpServiceGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_dlp_service_grpc_asyncio_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.DlpServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_dlp_service_grpc_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.DlpServiceGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_dlp_service_grpc_asyncio_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.DlpServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=("https://www.googleapis.com/auth/cloud-platform",), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_job_trigger_path(): + project = "squid" + job_trigger = "clam" + + expected = "projects/{project}/jobTriggers/{job_trigger}".format( + project=project, job_trigger=job_trigger, + ) + actual = DlpServiceClient.job_trigger_path(project, job_trigger) + assert expected == actual + + +def test_parse_job_trigger_path(): + expected = { + "project": "whelk", + "job_trigger": "octopus", + } + path = DlpServiceClient.job_trigger_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_job_trigger_path(path) + assert expected == actual + + +def test_inspect_template_path(): + organization = "squid" + inspect_template = "clam" + + expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format( + organization=organization, inspect_template=inspect_template, + ) + actual = DlpServiceClient.inspect_template_path(organization, inspect_template) + assert expected == actual + + +def test_parse_inspect_template_path(): + expected = { + "organization": "whelk", + "inspect_template": "octopus", + } + path = DlpServiceClient.inspect_template_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_inspect_template_path(path) + assert expected == actual + + +def test_deidentify_template_path(): + organization = "squid" + deidentify_template = "clam" + + expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format( + organization=organization, deidentify_template=deidentify_template, + ) + actual = DlpServiceClient.deidentify_template_path( + organization, deidentify_template + ) + assert expected == actual + + +def test_parse_deidentify_template_path(): + expected = { + "organization": "whelk", + "deidentify_template": "octopus", + } + path = DlpServiceClient.deidentify_template_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_deidentify_template_path(path) + assert expected == actual diff --git a/tests/unit/gapic/v2/test_dlp_service_client_v2.py b/tests/unit/gapic/v2/test_dlp_service_client_v2.py deleted file mode 100644 index 6eb1c1c1..00000000 --- a/tests/unit/gapic/v2/test_dlp_service_client_v2.py +++ /dev/null @@ -1,1386 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud import dlp_v2 -from google.cloud.dlp_v2.proto import dlp_pb2 -from google.protobuf import empty_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestDlpServiceClient(object): - def test_inspect_content(self): - # Setup Expected Response - expected_response = {} - expected_response = dlp_pb2.InspectContentResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - response = client.inspect_content() - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.InspectContentRequest() - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_inspect_content_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - 
create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - with pytest.raises(CustomException): - client.inspect_content() - - def test_redact_image(self): - # Setup Expected Response - redacted_image = b"28" - extracted_text = "extractedText998260012" - expected_response = { - "redacted_image": redacted_image, - "extracted_text": extracted_text, - } - expected_response = dlp_pb2.RedactImageResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - response = client.redact_image() - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.RedactImageRequest() - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_redact_image_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - with pytest.raises(CustomException): - client.redact_image() - - def test_deidentify_content(self): - # Setup Expected Response - expected_response = {} - expected_response = dlp_pb2.DeidentifyContentResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - response = client.deidentify_content() - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.DeidentifyContentRequest() - actual_request = channel.requests[0][1] - assert expected_request == 
actual_request - - def test_deidentify_content_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - with pytest.raises(CustomException): - client.deidentify_content() - - def test_reidentify_content(self): - # Setup Expected Response - expected_response = {} - expected_response = dlp_pb2.ReidentifyContentResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - - response = client.reidentify_content(parent) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.ReidentifyContentRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_reidentify_content_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - - with pytest.raises(CustomException): - client.reidentify_content(parent) - - def test_list_info_types(self): - # Setup Expected Response - expected_response = {} - expected_response = dlp_pb2.ListInfoTypesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel 
- client = dlp_v2.DlpServiceClient() - - response = client.list_info_types() - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.ListInfoTypesRequest() - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_info_types_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - with pytest.raises(CustomException): - client.list_info_types() - - def test_create_inspect_template(self): - # Setup Expected Response - name = "name3373707" - display_name = "displayName1615086568" - description = "description-1724546052" - expected_response = { - "name": name, - "display_name": display_name, - "description": description, - } - expected_response = dlp_pb2.InspectTemplate(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - parent = client.organization_location_path("[ORGANIZATION]", "[LOCATION]") - inspect_template = {} - - response = client.create_inspect_template(parent, inspect_template) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.CreateInspectTemplateRequest( - parent=parent, inspect_template=inspect_template - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_inspect_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - 
create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - parent = client.organization_location_path("[ORGANIZATION]", "[LOCATION]") - inspect_template = {} - - with pytest.raises(CustomException): - client.create_inspect_template(parent, inspect_template) - - def test_update_inspect_template(self): - # Setup Expected Response - name_2 = "name2-1052831874" - display_name = "displayName1615086568" - description = "description-1724546052" - expected_response = { - "name": name_2, - "display_name": display_name, - "description": description, - } - expected_response = dlp_pb2.InspectTemplate(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = client.organization_inspect_template_path( - "[ORGANIZATION]", "[INSPECT_TEMPLATE]" - ) - - response = client.update_inspect_template(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.UpdateInspectTemplateRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_inspect_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = client.organization_inspect_template_path( - "[ORGANIZATION]", "[INSPECT_TEMPLATE]" - ) - - with pytest.raises(CustomException): - client.update_inspect_template(name) - - def test_get_inspect_template(self): - # Setup Expected Response - name_2 = "name2-1052831874" - display_name = "displayName1615086568" - description = 
"description-1724546052" - expected_response = { - "name": name_2, - "display_name": display_name, - "description": description, - } - expected_response = dlp_pb2.InspectTemplate(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = client.organization_inspect_template_path( - "[ORGANIZATION]", "[INSPECT_TEMPLATE]" - ) - - response = client.get_inspect_template(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.GetInspectTemplateRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_inspect_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = client.organization_inspect_template_path( - "[ORGANIZATION]", "[INSPECT_TEMPLATE]" - ) - - with pytest.raises(CustomException): - client.get_inspect_template(name) - - def test_list_inspect_templates(self): - # Setup Expected Response - next_page_token = "" - inspect_templates_element = {} - inspect_templates = [inspect_templates_element] - expected_response = { - "next_page_token": next_page_token, - "inspect_templates": inspect_templates, - } - expected_response = dlp_pb2.ListInspectTemplatesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup 
Request - parent = client.organization_location_path("[ORGANIZATION]", "[LOCATION]") - - paged_list_response = client.list_inspect_templates(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.inspect_templates[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.ListInspectTemplatesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_inspect_templates_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - parent = client.organization_location_path("[ORGANIZATION]", "[LOCATION]") - - paged_list_response = client.list_inspect_templates(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_inspect_template(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = client.organization_inspect_template_path( - "[ORGANIZATION]", "[INSPECT_TEMPLATE]" - ) - - client.delete_inspect_template(name) - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.DeleteInspectTemplateRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_inspect_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = client.organization_inspect_template_path( - "[ORGANIZATION]", 
"[INSPECT_TEMPLATE]" - ) - - with pytest.raises(CustomException): - client.delete_inspect_template(name) - - def test_create_deidentify_template(self): - # Setup Expected Response - name = "name3373707" - display_name = "displayName1615086568" - description = "description-1724546052" - expected_response = { - "name": name, - "display_name": display_name, - "description": description, - } - expected_response = dlp_pb2.DeidentifyTemplate(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - parent = client.organization_location_path("[ORGANIZATION]", "[LOCATION]") - deidentify_template = {} - - response = client.create_deidentify_template(parent, deidentify_template) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.CreateDeidentifyTemplateRequest( - parent=parent, deidentify_template=deidentify_template - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_deidentify_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - parent = client.organization_location_path("[ORGANIZATION]", "[LOCATION]") - deidentify_template = {} - - with pytest.raises(CustomException): - client.create_deidentify_template(parent, deidentify_template) - - def test_update_deidentify_template(self): - # Setup Expected Response - name_2 = "name2-1052831874" - display_name = "displayName1615086568" - description = "description-1724546052" - expected_response = { - "name": name_2, - 
"display_name": display_name, - "description": description, - } - expected_response = dlp_pb2.DeidentifyTemplate(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = client.organization_deidentify_template_path( - "[ORGANIZATION]", "[DEIDENTIFY_TEMPLATE]" - ) - - response = client.update_deidentify_template(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.UpdateDeidentifyTemplateRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_deidentify_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = client.organization_deidentify_template_path( - "[ORGANIZATION]", "[DEIDENTIFY_TEMPLATE]" - ) - - with pytest.raises(CustomException): - client.update_deidentify_template(name) - - def test_get_deidentify_template(self): - # Setup Expected Response - name_2 = "name2-1052831874" - display_name = "displayName1615086568" - description = "description-1724546052" - expected_response = { - "name": name_2, - "display_name": display_name, - "description": description, - } - expected_response = dlp_pb2.DeidentifyTemplate(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = 
client.organization_deidentify_template_path( - "[ORGANIZATION]", "[DEIDENTIFY_TEMPLATE]" - ) - - response = client.get_deidentify_template(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.GetDeidentifyTemplateRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_deidentify_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = client.organization_deidentify_template_path( - "[ORGANIZATION]", "[DEIDENTIFY_TEMPLATE]" - ) - - with pytest.raises(CustomException): - client.get_deidentify_template(name) - - def test_list_deidentify_templates(self): - # Setup Expected Response - next_page_token = "" - deidentify_templates_element = {} - deidentify_templates = [deidentify_templates_element] - expected_response = { - "next_page_token": next_page_token, - "deidentify_templates": deidentify_templates, - } - expected_response = dlp_pb2.ListDeidentifyTemplatesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - parent = client.organization_location_path("[ORGANIZATION]", "[LOCATION]") - - paged_list_response = client.list_deidentify_templates(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.deidentify_templates[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.ListDeidentifyTemplatesRequest(parent=parent) - actual_request = channel.requests[0][1] - 
assert expected_request == actual_request - - def test_list_deidentify_templates_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - parent = client.organization_location_path("[ORGANIZATION]", "[LOCATION]") - - paged_list_response = client.list_deidentify_templates(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_deidentify_template(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = client.organization_deidentify_template_path( - "[ORGANIZATION]", "[DEIDENTIFY_TEMPLATE]" - ) - - client.delete_deidentify_template(name) - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.DeleteDeidentifyTemplateRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_deidentify_template_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = client.organization_deidentify_template_path( - "[ORGANIZATION]", "[DEIDENTIFY_TEMPLATE]" - ) - - with pytest.raises(CustomException): - client.delete_deidentify_template(name) - - def test_create_dlp_job(self): - # Setup Expected Response - name = "name3373707" - job_trigger_name = "jobTriggerName1819490804" - expected_response = {"name": name, "job_trigger_name": job_trigger_name} - expected_response = dlp_pb2.DlpJob(**expected_response) - - # Mock the API response - channel 
= ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - - response = client.create_dlp_job(parent) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.CreateDlpJobRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_dlp_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - - with pytest.raises(CustomException): - client.create_dlp_job(parent) - - def test_list_dlp_jobs(self): - # Setup Expected Response - next_page_token = "" - jobs_element = {} - jobs = [jobs_element] - expected_response = {"next_page_token": next_page_token, "jobs": jobs} - expected_response = dlp_pb2.ListDlpJobsResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_dlp_jobs(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.jobs[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.ListDlpJobsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def 
test_list_dlp_jobs_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_dlp_jobs(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_dlp_job(self): - # Setup Expected Response - name_2 = "name2-1052831874" - job_trigger_name = "jobTriggerName1819490804" - expected_response = {"name": name_2, "job_trigger_name": job_trigger_name} - expected_response = dlp_pb2.DlpJob(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = "name3373707" - - response = client.get_dlp_job(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.GetDlpJobRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_dlp_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = "name3373707" - - with pytest.raises(CustomException): - client.get_dlp_job(name) - - def test_delete_dlp_job(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = "name3373707" - - 
client.delete_dlp_job(name) - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.DeleteDlpJobRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_dlp_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = "name3373707" - - with pytest.raises(CustomException): - client.delete_dlp_job(name) - - def test_cancel_dlp_job(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = "name3373707" - - client.cancel_dlp_job(name) - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.CancelDlpJobRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_cancel_dlp_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = "name3373707" - - with pytest.raises(CustomException): - client.cancel_dlp_job(name) - - def test_finish_dlp_job(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = "name3373707" - - client.finish_dlp_job(name) - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.FinishDlpJobRequest(name=name) - actual_request = channel.requests[0][1] - 
assert expected_request == actual_request - - def test_finish_dlp_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = "name3373707" - - with pytest.raises(CustomException): - client.finish_dlp_job(name) - - def test_hybrid_inspect_dlp_job(self): - # Setup Expected Response - expected_response = {} - expected_response = dlp_pb2.HybridInspectResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = "name3373707" - - response = client.hybrid_inspect_dlp_job(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.HybridInspectDlpJobRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_hybrid_inspect_dlp_job_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = "name3373707" - - with pytest.raises(CustomException): - client.hybrid_inspect_dlp_job(name) - - def test_list_job_triggers(self): - # Setup Expected Response - next_page_token = "" - job_triggers_element = {} - job_triggers = [job_triggers_element] - expected_response = { - "next_page_token": next_page_token, - "job_triggers": job_triggers, - } - expected_response = dlp_pb2.ListJobTriggersResponse(**expected_response) - - # Mock the API 
response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_job_triggers(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.job_triggers[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.ListJobTriggersRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_job_triggers_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_job_triggers(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_job_trigger(self): - # Setup Expected Response - name_2 = "name2-1052831874" - display_name = "displayName1615086568" - description = "description-1724546052" - expected_response = { - "name": name_2, - "display_name": display_name, - "description": description, - } - expected_response = dlp_pb2.JobTrigger(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = client.project_job_trigger_path("[PROJECT]", "[JOB_TRIGGER]") - - response = client.get_job_trigger(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = 
dlp_pb2.GetJobTriggerRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_job_trigger_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = client.project_job_trigger_path("[PROJECT]", "[JOB_TRIGGER]") - - with pytest.raises(CustomException): - client.get_job_trigger(name) - - def test_delete_job_trigger(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = client.project_job_trigger_path("[PROJECT]", "[JOB_TRIGGER]") - - client.delete_job_trigger(name) - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.DeleteJobTriggerRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_job_trigger_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = client.project_job_trigger_path("[PROJECT]", "[JOB_TRIGGER]") - - with pytest.raises(CustomException): - client.delete_job_trigger(name) - - def test_hybrid_inspect_job_trigger(self): - # Setup Expected Response - expected_response = {} - expected_response = dlp_pb2.HybridInspectResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - 
create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = client.project_job_trigger_path("[PROJECT]", "[JOB_TRIGGER]") - - response = client.hybrid_inspect_job_trigger(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.HybridInspectJobTriggerRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_hybrid_inspect_job_trigger_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = client.project_job_trigger_path("[PROJECT]", "[JOB_TRIGGER]") - - with pytest.raises(CustomException): - client.hybrid_inspect_job_trigger(name) - - def test_update_job_trigger(self): - # Setup Expected Response - name_2 = "name2-1052831874" - display_name = "displayName1615086568" - description = "description-1724546052" - expected_response = { - "name": name_2, - "display_name": display_name, - "description": description, - } - expected_response = dlp_pb2.JobTrigger(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = client.project_job_trigger_path("[PROJECT]", "[JOB_TRIGGER]") - - response = client.update_job_trigger(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.UpdateJobTriggerRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_job_trigger_exception(self): - # Mock the API response - 
channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = client.project_job_trigger_path("[PROJECT]", "[JOB_TRIGGER]") - - with pytest.raises(CustomException): - client.update_job_trigger(name) - - def test_create_job_trigger(self): - # Setup Expected Response - name = "name3373707" - display_name = "displayName1615086568" - description = "description-1724546052" - expected_response = { - "name": name, - "display_name": display_name, - "description": description, - } - expected_response = dlp_pb2.JobTrigger(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - job_trigger = {} - - response = client.create_job_trigger(parent, job_trigger) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.CreateJobTriggerRequest( - parent=parent, job_trigger=job_trigger - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_job_trigger_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - job_trigger = {} - - with pytest.raises(CustomException): - client.create_job_trigger(parent, job_trigger) - - def test_create_stored_info_type(self): - # Setup Expected Response - name = "name3373707" - expected_response = 
{"name": name} - expected_response = dlp_pb2.StoredInfoType(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - parent = client.organization_location_path("[ORGANIZATION]", "[LOCATION]") - config = {} - - response = client.create_stored_info_type(parent, config) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.CreateStoredInfoTypeRequest( - parent=parent, config=config - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_stored_info_type_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - parent = client.organization_location_path("[ORGANIZATION]", "[LOCATION]") - config = {} - - with pytest.raises(CustomException): - client.create_stored_info_type(parent, config) - - def test_update_stored_info_type(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = dlp_pb2.StoredInfoType(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = client.organization_stored_info_type_path( - "[ORGANIZATION]", "[STORED_INFO_TYPE]" - ) - - response = client.update_stored_info_type(name) - assert expected_response == response - - assert len(channel.requests) == 1 - 
expected_request = dlp_pb2.UpdateStoredInfoTypeRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_stored_info_type_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = client.organization_stored_info_type_path( - "[ORGANIZATION]", "[STORED_INFO_TYPE]" - ) - - with pytest.raises(CustomException): - client.update_stored_info_type(name) - - def test_get_stored_info_type(self): - # Setup Expected Response - name_2 = "name2-1052831874" - expected_response = {"name": name_2} - expected_response = dlp_pb2.StoredInfoType(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = client.organization_stored_info_type_path( - "[ORGANIZATION]", "[STORED_INFO_TYPE]" - ) - - response = client.get_stored_info_type(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.GetStoredInfoTypeRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_stored_info_type_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = client.organization_stored_info_type_path( - "[ORGANIZATION]", "[STORED_INFO_TYPE]" - ) - - with pytest.raises(CustomException): - 
client.get_stored_info_type(name) - - def test_list_stored_info_types(self): - # Setup Expected Response - next_page_token = "" - stored_info_types_element = {} - stored_info_types = [stored_info_types_element] - expected_response = { - "next_page_token": next_page_token, - "stored_info_types": stored_info_types, - } - expected_response = dlp_pb2.ListStoredInfoTypesResponse(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - parent = client.organization_location_path("[ORGANIZATION]", "[LOCATION]") - - paged_list_response = client.list_stored_info_types(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.stored_info_types[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.ListStoredInfoTypesRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_stored_info_types_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - parent = client.organization_location_path("[ORGANIZATION]", "[LOCATION]") - - paged_list_response = client.list_stored_info_types(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_delete_stored_info_type(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup Request - name = client.organization_stored_info_type_path( - 
"[ORGANIZATION]", "[STORED_INFO_TYPE]" - ) - - client.delete_stored_info_type(name) - - assert len(channel.requests) == 1 - expected_request = dlp_pb2.DeleteStoredInfoTypeRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_stored_info_type_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = dlp_v2.DlpServiceClient() - - # Setup request - name = client.organization_stored_info_type_path( - "[ORGANIZATION]", "[STORED_INFO_TYPE]" - ) - - with pytest.raises(CustomException): - client.delete_stored_info_type(name)