From 5a8d4da83ea120d9947e2fc234a214f0d0ef3239 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 29 Jun 2021 22:20:59 +0000 Subject: [PATCH 1/2] chore: use gapic-generator-python 0.50.3 fix: disable always_use_jwt_access Committer: @busunkim96 PiperOrigin-RevId: 382142900 Source-Link: https://github.com/googleapis/googleapis/commit/513440fda515f3c799c22a30e3906dcda325004e Source-Link: https://github.com/googleapis/googleapis-gen/commit/7b1e2c31233f79a704ec21ca410bf661d6bc68d0 --- owl-bot-staging/v2/.coveragerc | 17 + owl-bot-staging/v2/MANIFEST.in | 2 + owl-bot-staging/v2/README.rst | 49 + owl-bot-staging/v2/docs/conf.py | 376 + .../v2/docs/dlp_v2/dlp_service.rst | 10 + owl-bot-staging/v2/docs/dlp_v2/services.rst | 6 + owl-bot-staging/v2/docs/dlp_v2/types.rst | 7 + owl-bot-staging/v2/docs/index.rst | 7 + .../v2/google/cloud/dlp/__init__.py | 333 + owl-bot-staging/v2/google/cloud/dlp/py.typed | 2 + .../v2/google/cloud/dlp_v2/__init__.py | 334 + .../google/cloud/dlp_v2/gapic_metadata.json | 363 + .../v2/google/cloud/dlp_v2/py.typed | 2 + .../google/cloud/dlp_v2/services/__init__.py | 15 + .../dlp_v2/services/dlp_service/__init__.py | 22 + .../services/dlp_service/async_client.py | 3237 ++++++ .../dlp_v2/services/dlp_service/client.py | 3345 ++++++ .../dlp_v2/services/dlp_service/pagers.py | 628 ++ .../dlp_service/transports/__init__.py | 33 + .../services/dlp_service/transports/base.py | 771 ++ .../services/dlp_service/transports/grpc.py | 1244 +++ .../dlp_service/transports/grpc_asyncio.py | 1248 +++ .../v2/google/cloud/dlp_v2/types/__init__.py | 332 + .../v2/google/cloud/dlp_v2/types/dlp.py | 6338 +++++++++++ .../v2/google/cloud/dlp_v2/types/storage.py | 1202 +++ owl-bot-staging/v2/mypy.ini | 3 + owl-bot-staging/v2/noxfile.py | 132 + .../v2/scripts/fixup_dlp_v2_keywords.py | 209 + owl-bot-staging/v2/setup.py | 53 + owl-bot-staging/v2/tests/__init__.py | 16 + owl-bot-staging/v2/tests/unit/__init__.py | 16 + .../v2/tests/unit/gapic/__init__.py | 16 + .../v2/tests/unit/gapic/dlp_v2/__init__.py | 16 + .../unit/gapic/dlp_v2/test_dlp_service.py | 9393 +++++++++++++++++ 34 files changed, 29777 insertions(+) create mode 100644 owl-bot-staging/v2/.coveragerc create mode 100644 owl-bot-staging/v2/MANIFEST.in create mode 100644 owl-bot-staging/v2/README.rst create mode 100644 owl-bot-staging/v2/docs/conf.py create mode 100644 owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst create mode 100644 owl-bot-staging/v2/docs/dlp_v2/services.rst create mode 100644 owl-bot-staging/v2/docs/dlp_v2/types.rst create mode 100644 owl-bot-staging/v2/docs/index.rst create mode 100644 owl-bot-staging/v2/google/cloud/dlp/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/py.typed create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py create mode 100644 
owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py create mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py create mode 100644 owl-bot-staging/v2/mypy.ini create mode 100644 owl-bot-staging/v2/noxfile.py create mode 100644 owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py create mode 100644 owl-bot-staging/v2/setup.py create mode 100644 owl-bot-staging/v2/tests/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py create mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc new file mode 100644 index 00000000..f8366a49 --- /dev/null +++ b/owl-bot-staging/v2/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/dlp/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in new file mode 100644 index 00000000..148f6bf3 --- /dev/null +++ b/owl-bot-staging/v2/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/dlp *.py +recursive-include google/cloud/dlp_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst new file mode 100644 index 00000000..cf97c2e7 --- /dev/null +++ b/owl-bot-staging/v2/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Dlp API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Dlp API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py new file mode 100644 index 00000000..c0dad2c2 --- /dev/null +++ b/owl-bot-staging/v2/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-dlp documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGELOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffixes as a list of strings: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-dlp" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# "<project> v<release> documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. 
+# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a <link> tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-dlp-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warnings, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-dlp.tex", + u"google-cloud-dlp Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. 
+# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "google-cloud-dlp", + u"Google Cloud Dlp Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-dlp", + u"google-cloud-dlp Documentation", + author, + "google-cloud-dlp", + "GAPIC library for Google Cloud Dlp API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst b/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst new file mode 100644 index 00000000..914da512 --- /dev/null +++ b/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst @@ -0,0 +1,10 @@ +DlpService +---------------------------- + +.. automodule:: google.cloud.dlp_v2.services.dlp_service + :members: + :inherited-members: + +.. automodule:: google.cloud.dlp_v2.services.dlp_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v2/docs/dlp_v2/services.rst b/owl-bot-staging/v2/docs/dlp_v2/services.rst new file mode 100644 index 00000000..864a8c83 --- /dev/null +++ b/owl-bot-staging/v2/docs/dlp_v2/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Dlp v2 API +==================================== +.. toctree:: + :maxdepth: 2 + + dlp_service diff --git a/owl-bot-staging/v2/docs/dlp_v2/types.rst b/owl-bot-staging/v2/docs/dlp_v2/types.rst new file mode 100644 index 00000000..f2a1a4f5 --- /dev/null +++ b/owl-bot-staging/v2/docs/dlp_v2/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Dlp v2 API +================================= + +.. 
automodule:: google.cloud.dlp_v2.types + :members: + :undoc-members: + :show-inheritance: diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst new file mode 100644 index 00000000..d119451a --- /dev/null +++ b/owl-bot-staging/v2/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + dlp_v2/services + dlp_v2/types diff --git a/owl-bot-staging/v2/google/cloud/dlp/__init__.py b/owl-bot-staging/v2/google/cloud/dlp/__init__.py new file mode 100644 index 00000000..32e9a89f --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp/__init__.py @@ -0,0 +1,333 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.cloud.dlp_v2.services.dlp_service.client import DlpServiceClient +from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient + +from google.cloud.dlp_v2.types.dlp import Action +from google.cloud.dlp_v2.types.dlp import ActivateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import AnalyzeDataSourceRiskDetails +from google.cloud.dlp_v2.types.dlp import BoundingBox +from google.cloud.dlp_v2.types.dlp import BucketingConfig +from google.cloud.dlp_v2.types.dlp import ByteContentItem +from google.cloud.dlp_v2.types.dlp import CancelDlpJobRequest +from google.cloud.dlp_v2.types.dlp import CharacterMaskConfig +from google.cloud.dlp_v2.types.dlp import CharsToIgnore +from google.cloud.dlp_v2.types.dlp import Color +from google.cloud.dlp_v2.types.dlp import Container +from google.cloud.dlp_v2.types.dlp import ContentItem +from google.cloud.dlp_v2.types.dlp import ContentLocation +from google.cloud.dlp_v2.types.dlp import CreateDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import CreateDlpJobRequest +from google.cloud.dlp_v2.types.dlp import CreateInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import CreateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import CreateStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import CryptoDeterministicConfig +from google.cloud.dlp_v2.types.dlp import CryptoHashConfig +from google.cloud.dlp_v2.types.dlp import CryptoKey +from google.cloud.dlp_v2.types.dlp import CryptoReplaceFfxFpeConfig +from google.cloud.dlp_v2.types.dlp import DateShiftConfig +from google.cloud.dlp_v2.types.dlp import DateTime +from google.cloud.dlp_v2.types.dlp import DeidentifyConfig +from google.cloud.dlp_v2.types.dlp import DeidentifyContentRequest +from google.cloud.dlp_v2.types.dlp import DeidentifyContentResponse +from google.cloud.dlp_v2.types.dlp import DeidentifyTemplate +from google.cloud.dlp_v2.types.dlp import DeleteDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import DeleteDlpJobRequest +from google.cloud.dlp_v2.types.dlp import DeleteInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import DeleteJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import DeleteStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import DlpJob +from 
google.cloud.dlp_v2.types.dlp import DocumentLocation +from google.cloud.dlp_v2.types.dlp import Error +from google.cloud.dlp_v2.types.dlp import ExcludeInfoTypes +from google.cloud.dlp_v2.types.dlp import ExclusionRule +from google.cloud.dlp_v2.types.dlp import FieldTransformation +from google.cloud.dlp_v2.types.dlp import Finding +from google.cloud.dlp_v2.types.dlp import FinishDlpJobRequest +from google.cloud.dlp_v2.types.dlp import FixedSizeBucketingConfig +from google.cloud.dlp_v2.types.dlp import GetDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import GetDlpJobRequest +from google.cloud.dlp_v2.types.dlp import GetInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import GetJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import GetStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import HybridContentItem +from google.cloud.dlp_v2.types.dlp import HybridFindingDetails +from google.cloud.dlp_v2.types.dlp import HybridInspectDlpJobRequest +from google.cloud.dlp_v2.types.dlp import HybridInspectJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import HybridInspectResponse +from google.cloud.dlp_v2.types.dlp import HybridInspectStatistics +from google.cloud.dlp_v2.types.dlp import ImageLocation +from google.cloud.dlp_v2.types.dlp import InfoTypeDescription +from google.cloud.dlp_v2.types.dlp import InfoTypeStats +from google.cloud.dlp_v2.types.dlp import InfoTypeTransformations +from google.cloud.dlp_v2.types.dlp import InspectConfig +from google.cloud.dlp_v2.types.dlp import InspectContentRequest +from google.cloud.dlp_v2.types.dlp import InspectContentResponse +from google.cloud.dlp_v2.types.dlp import InspectDataSourceDetails +from google.cloud.dlp_v2.types.dlp import InspectionRule +from google.cloud.dlp_v2.types.dlp import InspectionRuleSet +from google.cloud.dlp_v2.types.dlp import InspectJobConfig +from google.cloud.dlp_v2.types.dlp import InspectResult +from google.cloud.dlp_v2.types.dlp import InspectTemplate +from google.cloud.dlp_v2.types.dlp import JobTrigger +from google.cloud.dlp_v2.types.dlp import KmsWrappedCryptoKey +from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryConfig +from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryStats +from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesRequest +from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesResponse +from google.cloud.dlp_v2.types.dlp import ListDlpJobsRequest +from google.cloud.dlp_v2.types.dlp import ListDlpJobsResponse +from google.cloud.dlp_v2.types.dlp import ListInfoTypesRequest +from google.cloud.dlp_v2.types.dlp import ListInfoTypesResponse +from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesRequest +from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesResponse +from google.cloud.dlp_v2.types.dlp import ListJobTriggersRequest +from google.cloud.dlp_v2.types.dlp import ListJobTriggersResponse +from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesRequest +from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesResponse +from google.cloud.dlp_v2.types.dlp import Location +from google.cloud.dlp_v2.types.dlp import Manual +from google.cloud.dlp_v2.types.dlp import MetadataLocation +from google.cloud.dlp_v2.types.dlp import OutputStorageConfig +from google.cloud.dlp_v2.types.dlp import PrimitiveTransformation +from google.cloud.dlp_v2.types.dlp import PrivacyMetric +from google.cloud.dlp_v2.types.dlp import QuasiId +from google.cloud.dlp_v2.types.dlp import QuoteInfo +from 
google.cloud.dlp_v2.types.dlp import Range +from google.cloud.dlp_v2.types.dlp import RecordCondition +from google.cloud.dlp_v2.types.dlp import RecordLocation +from google.cloud.dlp_v2.types.dlp import RecordSuppression +from google.cloud.dlp_v2.types.dlp import RecordTransformations +from google.cloud.dlp_v2.types.dlp import RedactConfig +from google.cloud.dlp_v2.types.dlp import RedactImageRequest +from google.cloud.dlp_v2.types.dlp import RedactImageResponse +from google.cloud.dlp_v2.types.dlp import ReidentifyContentRequest +from google.cloud.dlp_v2.types.dlp import ReidentifyContentResponse +from google.cloud.dlp_v2.types.dlp import ReplaceValueConfig +from google.cloud.dlp_v2.types.dlp import ReplaceWithInfoTypeConfig +from google.cloud.dlp_v2.types.dlp import RiskAnalysisJobConfig +from google.cloud.dlp_v2.types.dlp import Schedule +from google.cloud.dlp_v2.types.dlp import StatisticalTable +from google.cloud.dlp_v2.types.dlp import StorageMetadataLabel +from google.cloud.dlp_v2.types.dlp import StoredInfoType +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeConfig +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeStats +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeVersion +from google.cloud.dlp_v2.types.dlp import Table +from google.cloud.dlp_v2.types.dlp import TableLocation +from google.cloud.dlp_v2.types.dlp import TimePartConfig +from google.cloud.dlp_v2.types.dlp import TransformationErrorHandling +from google.cloud.dlp_v2.types.dlp import TransformationOverview +from google.cloud.dlp_v2.types.dlp import TransformationSummary +from google.cloud.dlp_v2.types.dlp import TransientCryptoKey +from google.cloud.dlp_v2.types.dlp import UnwrappedCryptoKey +from google.cloud.dlp_v2.types.dlp import UpdateDeidentifyTemplateRequest +from google.cloud.dlp_v2.types.dlp import UpdateInspectTemplateRequest +from google.cloud.dlp_v2.types.dlp import UpdateJobTriggerRequest +from google.cloud.dlp_v2.types.dlp import UpdateStoredInfoTypeRequest +from google.cloud.dlp_v2.types.dlp import Value +from google.cloud.dlp_v2.types.dlp import ValueFrequency +from google.cloud.dlp_v2.types.dlp import ContentOption +from google.cloud.dlp_v2.types.dlp import DlpJobType +from google.cloud.dlp_v2.types.dlp import InfoTypeSupportedBy +from google.cloud.dlp_v2.types.dlp import MatchingType +from google.cloud.dlp_v2.types.dlp import MetadataType +from google.cloud.dlp_v2.types.dlp import RelationalOperator +from google.cloud.dlp_v2.types.dlp import StoredInfoTypeState +from google.cloud.dlp_v2.types.storage import BigQueryField +from google.cloud.dlp_v2.types.storage import BigQueryKey +from google.cloud.dlp_v2.types.storage import BigQueryOptions +from google.cloud.dlp_v2.types.storage import BigQueryTable +from google.cloud.dlp_v2.types.storage import CloudStorageFileSet +from google.cloud.dlp_v2.types.storage import CloudStorageOptions +from google.cloud.dlp_v2.types.storage import CloudStoragePath +from google.cloud.dlp_v2.types.storage import CloudStorageRegexFileSet +from google.cloud.dlp_v2.types.storage import CustomInfoType +from google.cloud.dlp_v2.types.storage import DatastoreKey +from google.cloud.dlp_v2.types.storage import DatastoreOptions +from google.cloud.dlp_v2.types.storage import EntityId +from google.cloud.dlp_v2.types.storage import FieldId +from google.cloud.dlp_v2.types.storage import HybridOptions +from google.cloud.dlp_v2.types.storage import InfoType +from google.cloud.dlp_v2.types.storage import Key +from google.cloud.dlp_v2.types.storage import 
KindExpression +from google.cloud.dlp_v2.types.storage import PartitionId +from google.cloud.dlp_v2.types.storage import RecordKey +from google.cloud.dlp_v2.types.storage import StorageConfig +from google.cloud.dlp_v2.types.storage import StoredType +from google.cloud.dlp_v2.types.storage import TableOptions +from google.cloud.dlp_v2.types.storage import FileType +from google.cloud.dlp_v2.types.storage import Likelihood + +__all__ = ('DlpServiceClient', + 'DlpServiceAsyncClient', + 'Action', + 'ActivateJobTriggerRequest', + 'AnalyzeDataSourceRiskDetails', + 'BoundingBox', + 'BucketingConfig', + 'ByteContentItem', + 'CancelDlpJobRequest', + 'CharacterMaskConfig', + 'CharsToIgnore', + 'Color', + 'Container', + 'ContentItem', + 'ContentLocation', + 'CreateDeidentifyTemplateRequest', + 'CreateDlpJobRequest', + 'CreateInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'CreateStoredInfoTypeRequest', + 'CryptoDeterministicConfig', + 'CryptoHashConfig', + 'CryptoKey', + 'CryptoReplaceFfxFpeConfig', + 'DateShiftConfig', + 'DateTime', + 'DeidentifyConfig', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'DeidentifyTemplate', + 'DeleteDeidentifyTemplateRequest', + 'DeleteDlpJobRequest', + 'DeleteInspectTemplateRequest', + 'DeleteJobTriggerRequest', + 'DeleteStoredInfoTypeRequest', + 'DlpJob', + 'DocumentLocation', + 'Error', + 'ExcludeInfoTypes', + 'ExclusionRule', + 'FieldTransformation', + 'Finding', + 'FinishDlpJobRequest', + 'FixedSizeBucketingConfig', + 'GetDeidentifyTemplateRequest', + 'GetDlpJobRequest', + 'GetInspectTemplateRequest', + 'GetJobTriggerRequest', + 'GetStoredInfoTypeRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectDlpJobRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectResponse', + 'HybridInspectStatistics', + 'ImageLocation', + 'InfoTypeDescription', + 'InfoTypeStats', + 'InfoTypeTransformations', + 'InspectConfig', + 'InspectContentRequest', + 'InspectContentResponse', + 'InspectDataSourceDetails', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectJobConfig', + 'InspectResult', + 'InspectTemplate', + 'JobTrigger', + 'KmsWrappedCryptoKey', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 'ListDeidentifyTemplatesRequest', + 'ListDeidentifyTemplatesResponse', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'Location', + 'Manual', + 'MetadataLocation', + 'OutputStorageConfig', + 'PrimitiveTransformation', + 'PrivacyMetric', + 'QuasiId', + 'QuoteInfo', + 'Range', + 'RecordCondition', + 'RecordLocation', + 'RecordSuppression', + 'RecordTransformations', + 'RedactConfig', + 'RedactImageRequest', + 'RedactImageResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'ReplaceValueConfig', + 'ReplaceWithInfoTypeConfig', + 'RiskAnalysisJobConfig', + 'Schedule', + 'StatisticalTable', + 'StorageMetadataLabel', + 'StoredInfoType', + 'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'Table', + 'TableLocation', + 'TimePartConfig', + 'TransformationErrorHandling', + 'TransformationOverview', + 'TransformationSummary', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'UpdateDeidentifyTemplateRequest', + 'UpdateInspectTemplateRequest', + 'UpdateJobTriggerRequest', + 'UpdateStoredInfoTypeRequest', + 'Value', + 'ValueFrequency', + 
'ContentOption', + 'DlpJobType', + 'InfoTypeSupportedBy', + 'MatchingType', + 'MetadataType', + 'RelationalOperator', + 'StoredInfoTypeState', + 'BigQueryField', + 'BigQueryKey', + 'BigQueryOptions', + 'BigQueryTable', + 'CloudStorageFileSet', + 'CloudStorageOptions', + 'CloudStoragePath', + 'CloudStorageRegexFileSet', + 'CustomInfoType', + 'DatastoreKey', + 'DatastoreOptions', + 'EntityId', + 'FieldId', + 'HybridOptions', + 'InfoType', + 'Key', + 'KindExpression', + 'PartitionId', + 'RecordKey', + 'StorageConfig', + 'StoredType', + 'TableOptions', + 'FileType', + 'Likelihood', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp/py.typed b/owl-bot-staging/v2/google/cloud/dlp/py.typed new file mode 100644 index 00000000..23d89ef3 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py new file mode 100644 index 00000000..d20b32f7 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py @@ -0,0 +1,334 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.dlp_service import DlpServiceClient +from .services.dlp_service import DlpServiceAsyncClient + +from .types.dlp import Action +from .types.dlp import ActivateJobTriggerRequest +from .types.dlp import AnalyzeDataSourceRiskDetails +from .types.dlp import BoundingBox +from .types.dlp import BucketingConfig +from .types.dlp import ByteContentItem +from .types.dlp import CancelDlpJobRequest +from .types.dlp import CharacterMaskConfig +from .types.dlp import CharsToIgnore +from .types.dlp import Color +from .types.dlp import Container +from .types.dlp import ContentItem +from .types.dlp import ContentLocation +from .types.dlp import CreateDeidentifyTemplateRequest +from .types.dlp import CreateDlpJobRequest +from .types.dlp import CreateInspectTemplateRequest +from .types.dlp import CreateJobTriggerRequest +from .types.dlp import CreateStoredInfoTypeRequest +from .types.dlp import CryptoDeterministicConfig +from .types.dlp import CryptoHashConfig +from .types.dlp import CryptoKey +from .types.dlp import CryptoReplaceFfxFpeConfig +from .types.dlp import DateShiftConfig +from .types.dlp import DateTime +from .types.dlp import DeidentifyConfig +from .types.dlp import DeidentifyContentRequest +from .types.dlp import DeidentifyContentResponse +from .types.dlp import DeidentifyTemplate +from .types.dlp import DeleteDeidentifyTemplateRequest +from .types.dlp import DeleteDlpJobRequest +from .types.dlp import DeleteInspectTemplateRequest +from .types.dlp import DeleteJobTriggerRequest +from .types.dlp import DeleteStoredInfoTypeRequest +from .types.dlp import DlpJob +from .types.dlp import DocumentLocation +from .types.dlp import Error +from .types.dlp import ExcludeInfoTypes +from .types.dlp import ExclusionRule +from .types.dlp import FieldTransformation 
+from .types.dlp import Finding +from .types.dlp import FinishDlpJobRequest +from .types.dlp import FixedSizeBucketingConfig +from .types.dlp import GetDeidentifyTemplateRequest +from .types.dlp import GetDlpJobRequest +from .types.dlp import GetInspectTemplateRequest +from .types.dlp import GetJobTriggerRequest +from .types.dlp import GetStoredInfoTypeRequest +from .types.dlp import HybridContentItem +from .types.dlp import HybridFindingDetails +from .types.dlp import HybridInspectDlpJobRequest +from .types.dlp import HybridInspectJobTriggerRequest +from .types.dlp import HybridInspectResponse +from .types.dlp import HybridInspectStatistics +from .types.dlp import ImageLocation +from .types.dlp import InfoTypeDescription +from .types.dlp import InfoTypeStats +from .types.dlp import InfoTypeTransformations +from .types.dlp import InspectConfig +from .types.dlp import InspectContentRequest +from .types.dlp import InspectContentResponse +from .types.dlp import InspectDataSourceDetails +from .types.dlp import InspectionRule +from .types.dlp import InspectionRuleSet +from .types.dlp import InspectJobConfig +from .types.dlp import InspectResult +from .types.dlp import InspectTemplate +from .types.dlp import JobTrigger +from .types.dlp import KmsWrappedCryptoKey +from .types.dlp import LargeCustomDictionaryConfig +from .types.dlp import LargeCustomDictionaryStats +from .types.dlp import ListDeidentifyTemplatesRequest +from .types.dlp import ListDeidentifyTemplatesResponse +from .types.dlp import ListDlpJobsRequest +from .types.dlp import ListDlpJobsResponse +from .types.dlp import ListInfoTypesRequest +from .types.dlp import ListInfoTypesResponse +from .types.dlp import ListInspectTemplatesRequest +from .types.dlp import ListInspectTemplatesResponse +from .types.dlp import ListJobTriggersRequest +from .types.dlp import ListJobTriggersResponse +from .types.dlp import ListStoredInfoTypesRequest +from .types.dlp import ListStoredInfoTypesResponse +from .types.dlp import Location +from .types.dlp import Manual +from .types.dlp import MetadataLocation +from .types.dlp import OutputStorageConfig +from .types.dlp import PrimitiveTransformation +from .types.dlp import PrivacyMetric +from .types.dlp import QuasiId +from .types.dlp import QuoteInfo +from .types.dlp import Range +from .types.dlp import RecordCondition +from .types.dlp import RecordLocation +from .types.dlp import RecordSuppression +from .types.dlp import RecordTransformations +from .types.dlp import RedactConfig +from .types.dlp import RedactImageRequest +from .types.dlp import RedactImageResponse +from .types.dlp import ReidentifyContentRequest +from .types.dlp import ReidentifyContentResponse +from .types.dlp import ReplaceValueConfig +from .types.dlp import ReplaceWithInfoTypeConfig +from .types.dlp import RiskAnalysisJobConfig +from .types.dlp import Schedule +from .types.dlp import StatisticalTable +from .types.dlp import StorageMetadataLabel +from .types.dlp import StoredInfoType +from .types.dlp import StoredInfoTypeConfig +from .types.dlp import StoredInfoTypeStats +from .types.dlp import StoredInfoTypeVersion +from .types.dlp import Table +from .types.dlp import TableLocation +from .types.dlp import TimePartConfig +from .types.dlp import TransformationErrorHandling +from .types.dlp import TransformationOverview +from .types.dlp import TransformationSummary +from .types.dlp import TransientCryptoKey +from .types.dlp import UnwrappedCryptoKey +from .types.dlp import UpdateDeidentifyTemplateRequest +from .types.dlp import 
UpdateInspectTemplateRequest +from .types.dlp import UpdateJobTriggerRequest +from .types.dlp import UpdateStoredInfoTypeRequest +from .types.dlp import Value +from .types.dlp import ValueFrequency +from .types.dlp import ContentOption +from .types.dlp import DlpJobType +from .types.dlp import InfoTypeSupportedBy +from .types.dlp import MatchingType +from .types.dlp import MetadataType +from .types.dlp import RelationalOperator +from .types.dlp import StoredInfoTypeState +from .types.storage import BigQueryField +from .types.storage import BigQueryKey +from .types.storage import BigQueryOptions +from .types.storage import BigQueryTable +from .types.storage import CloudStorageFileSet +from .types.storage import CloudStorageOptions +from .types.storage import CloudStoragePath +from .types.storage import CloudStorageRegexFileSet +from .types.storage import CustomInfoType +from .types.storage import DatastoreKey +from .types.storage import DatastoreOptions +from .types.storage import EntityId +from .types.storage import FieldId +from .types.storage import HybridOptions +from .types.storage import InfoType +from .types.storage import Key +from .types.storage import KindExpression +from .types.storage import PartitionId +from .types.storage import RecordKey +from .types.storage import StorageConfig +from .types.storage import StoredType +from .types.storage import TableOptions +from .types.storage import FileType +from .types.storage import Likelihood + +__all__ = ( + 'DlpServiceAsyncClient', +'Action', +'ActivateJobTriggerRequest', +'AnalyzeDataSourceRiskDetails', +'BigQueryField', +'BigQueryKey', +'BigQueryOptions', +'BigQueryTable', +'BoundingBox', +'BucketingConfig', +'ByteContentItem', +'CancelDlpJobRequest', +'CharacterMaskConfig', +'CharsToIgnore', +'CloudStorageFileSet', +'CloudStorageOptions', +'CloudStoragePath', +'CloudStorageRegexFileSet', +'Color', +'Container', +'ContentItem', +'ContentLocation', +'ContentOption', +'CreateDeidentifyTemplateRequest', +'CreateDlpJobRequest', +'CreateInspectTemplateRequest', +'CreateJobTriggerRequest', +'CreateStoredInfoTypeRequest', +'CryptoDeterministicConfig', +'CryptoHashConfig', +'CryptoKey', +'CryptoReplaceFfxFpeConfig', +'CustomInfoType', +'DatastoreKey', +'DatastoreOptions', +'DateShiftConfig', +'DateTime', +'DeidentifyConfig', +'DeidentifyContentRequest', +'DeidentifyContentResponse', +'DeidentifyTemplate', +'DeleteDeidentifyTemplateRequest', +'DeleteDlpJobRequest', +'DeleteInspectTemplateRequest', +'DeleteJobTriggerRequest', +'DeleteStoredInfoTypeRequest', +'DlpJob', +'DlpJobType', +'DlpServiceClient', +'DocumentLocation', +'EntityId', +'Error', +'ExcludeInfoTypes', +'ExclusionRule', +'FieldId', +'FieldTransformation', +'FileType', +'Finding', +'FinishDlpJobRequest', +'FixedSizeBucketingConfig', +'GetDeidentifyTemplateRequest', +'GetDlpJobRequest', +'GetInspectTemplateRequest', +'GetJobTriggerRequest', +'GetStoredInfoTypeRequest', +'HybridContentItem', +'HybridFindingDetails', +'HybridInspectDlpJobRequest', +'HybridInspectJobTriggerRequest', +'HybridInspectResponse', +'HybridInspectStatistics', +'HybridOptions', +'ImageLocation', +'InfoType', +'InfoTypeDescription', +'InfoTypeStats', +'InfoTypeSupportedBy', +'InfoTypeTransformations', +'InspectConfig', +'InspectContentRequest', +'InspectContentResponse', +'InspectDataSourceDetails', +'InspectJobConfig', +'InspectResult', +'InspectTemplate', +'InspectionRule', +'InspectionRuleSet', +'JobTrigger', +'Key', +'KindExpression', +'KmsWrappedCryptoKey', +'LargeCustomDictionaryConfig', 
+'LargeCustomDictionaryStats', +'Likelihood', +'ListDeidentifyTemplatesRequest', +'ListDeidentifyTemplatesResponse', +'ListDlpJobsRequest', +'ListDlpJobsResponse', +'ListInfoTypesRequest', +'ListInfoTypesResponse', +'ListInspectTemplatesRequest', +'ListInspectTemplatesResponse', +'ListJobTriggersRequest', +'ListJobTriggersResponse', +'ListStoredInfoTypesRequest', +'ListStoredInfoTypesResponse', +'Location', +'Manual', +'MatchingType', +'MetadataLocation', +'MetadataType', +'OutputStorageConfig', +'PartitionId', +'PrimitiveTransformation', +'PrivacyMetric', +'QuasiId', +'QuoteInfo', +'Range', +'RecordCondition', +'RecordKey', +'RecordLocation', +'RecordSuppression', +'RecordTransformations', +'RedactConfig', +'RedactImageRequest', +'RedactImageResponse', +'ReidentifyContentRequest', +'ReidentifyContentResponse', +'RelationalOperator', +'ReplaceValueConfig', +'ReplaceWithInfoTypeConfig', +'RiskAnalysisJobConfig', +'Schedule', +'StatisticalTable', +'StorageConfig', +'StorageMetadataLabel', +'StoredInfoType', +'StoredInfoTypeConfig', +'StoredInfoTypeState', +'StoredInfoTypeStats', +'StoredInfoTypeVersion', +'StoredType', +'Table', +'TableLocation', +'TableOptions', +'TimePartConfig', +'TransformationErrorHandling', +'TransformationOverview', +'TransformationSummary', +'TransientCryptoKey', +'UnwrappedCryptoKey', +'UpdateDeidentifyTemplateRequest', +'UpdateInspectTemplateRequest', +'UpdateJobTriggerRequest', +'UpdateStoredInfoTypeRequest', +'Value', +'ValueFrequency', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json new file mode 100644 index 00000000..df73928b --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json @@ -0,0 +1,363 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.dlp_v2", + "protoPackage": "google.privacy.dlp.v2", + "schema": "1.0", + "services": { + "DlpService": { + "clients": { + "grpc": { + "libraryClient": "DlpServiceClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + 
"hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + "update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DlpServiceAsyncClient", + "rpcs": { + "ActivateJobTrigger": { + "methods": [ + "activate_job_trigger" + ] + }, + "CancelDlpJob": { + "methods": [ + "cancel_dlp_job" + ] + }, + "CreateDeidentifyTemplate": { + "methods": [ + "create_deidentify_template" + ] + }, + "CreateDlpJob": { + "methods": [ + "create_dlp_job" + ] + }, + "CreateInspectTemplate": { + "methods": [ + "create_inspect_template" + ] + }, + "CreateJobTrigger": { + "methods": [ + "create_job_trigger" + ] + }, + "CreateStoredInfoType": { + "methods": [ + "create_stored_info_type" + ] + }, + "DeidentifyContent": { + "methods": [ + "deidentify_content" + ] + }, + "DeleteDeidentifyTemplate": { + "methods": [ + "delete_deidentify_template" + ] + }, + "DeleteDlpJob": { + "methods": [ + "delete_dlp_job" + ] + }, + "DeleteInspectTemplate": { + "methods": [ + "delete_inspect_template" + ] + }, + "DeleteJobTrigger": { + "methods": [ + "delete_job_trigger" + ] + }, + "DeleteStoredInfoType": { + "methods": [ + "delete_stored_info_type" + ] + }, + "FinishDlpJob": { + "methods": [ + "finish_dlp_job" + ] + }, + "GetDeidentifyTemplate": { + "methods": [ + "get_deidentify_template" + ] + }, + "GetDlpJob": { + "methods": [ + "get_dlp_job" + ] + }, + "GetInspectTemplate": { + "methods": [ + "get_inspect_template" + ] + }, + "GetJobTrigger": { + "methods": [ + "get_job_trigger" + ] + }, + "GetStoredInfoType": { + "methods": [ + "get_stored_info_type" + ] + }, + "HybridInspectDlpJob": { + "methods": [ + "hybrid_inspect_dlp_job" + ] + }, + "HybridInspectJobTrigger": { + "methods": [ + "hybrid_inspect_job_trigger" + ] + }, + "InspectContent": { + "methods": [ + "inspect_content" + ] + }, + "ListDeidentifyTemplates": { + "methods": [ + "list_deidentify_templates" + ] + }, + "ListDlpJobs": { + "methods": [ + "list_dlp_jobs" + ] + }, + "ListInfoTypes": { + "methods": [ + "list_info_types" + ] + }, + "ListInspectTemplates": { + "methods": [ + "list_inspect_templates" + ] + }, + "ListJobTriggers": { + "methods": [ + "list_job_triggers" + ] + }, + "ListStoredInfoTypes": { + "methods": [ + "list_stored_info_types" + ] + }, + "RedactImage": { + "methods": [ + "redact_image" + ] + }, + "ReidentifyContent": { + "methods": [ + "reidentify_content" + ] + }, + "UpdateDeidentifyTemplate": { + "methods": [ + "update_deidentify_template" + ] + }, + "UpdateInspectTemplate": { + "methods": [ + "update_inspect_template" + ] + }, + "UpdateJobTrigger": { + "methods": [ + 
"update_job_trigger" + ] + }, + "UpdateStoredInfoType": { + "methods": [ + "update_stored_info_type" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed b/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed new file mode 100644 index 00000000..23d89ef3 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-dlp package uses inline types. diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py new file mode 100644 index 00000000..4de65971 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py new file mode 100644 index 00000000..161801ef --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import DlpServiceClient +from .async_client import DlpServiceAsyncClient + +__all__ = ( + 'DlpServiceClient', + 'DlpServiceAsyncClient', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py new file mode 100644 index 00000000..efcf4735 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py @@ -0,0 +1,3237 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.types import dlp +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport +from .client import DlpServiceClient + + +class DlpServiceAsyncClient: + """The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in user-supplied, unstructured data streams, like text blocks or images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides, see + https://cloud.google.com/dlp/docs/. + """ + + _client: DlpServiceClient + + DEFAULT_ENDPOINT = DlpServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DlpServiceClient.DEFAULT_MTLS_ENDPOINT + + deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) + parse_deidentify_template_path = staticmethod(DlpServiceClient.parse_deidentify_template_path) + dlp_content_path = staticmethod(DlpServiceClient.dlp_content_path) + parse_dlp_content_path = staticmethod(DlpServiceClient.parse_dlp_content_path) + dlp_job_path = staticmethod(DlpServiceClient.dlp_job_path) + parse_dlp_job_path = staticmethod(DlpServiceClient.parse_dlp_job_path) + finding_path = staticmethod(DlpServiceClient.finding_path) + parse_finding_path = staticmethod(DlpServiceClient.parse_finding_path) + inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) + parse_inspect_template_path = staticmethod(DlpServiceClient.parse_inspect_template_path) + job_trigger_path = staticmethod(DlpServiceClient.job_trigger_path) + parse_job_trigger_path = staticmethod(DlpServiceClient.parse_job_trigger_path) + stored_info_type_path = staticmethod(DlpServiceClient.stored_info_type_path) + parse_stored_info_type_path = staticmethod(DlpServiceClient.parse_stored_info_type_path) + common_billing_account_path = staticmethod(DlpServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(DlpServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(DlpServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(DlpServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(DlpServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(DlpServiceClient.parse_common_organization_path) + common_project_path = staticmethod(DlpServiceClient.common_project_path) + parse_common_project_path = staticmethod(DlpServiceClient.parse_common_project_path) + common_location_path = staticmethod(DlpServiceClient.common_location_path) + parse_common_location_path = 
staticmethod(DlpServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceAsyncClient: The constructed client. + """ + return DlpServiceClient.from_service_account_info.__func__(DlpServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceAsyncClient: The constructed client. + """ + return DlpServiceClient.from_service_account_file.__func__(DlpServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DlpServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DlpServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(DlpServiceClient).get_transport_class, type(DlpServiceClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, DlpServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the dlp service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DlpServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
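For reference, a minimal construction-and-call sketch under these options (the ``example-project`` ID is an assumption; the endpoint override is optional and shown only to illustrate ``client_options``):

.. code-block:: python

    import asyncio

    from google.api_core.client_options import ClientOptions
    from google.cloud import dlp_v2

    async def main() -> None:
        # An explicit api_endpoint takes precedence over the
        # GOOGLE_API_USE_MTLS_ENDPOINT environment variable.
        client = dlp_v2.DlpServiceAsyncClient(
            client_options=ClientOptions(api_endpoint="dlp.googleapis.com"),
        )
        response = await client.inspect_content(
            request={
                "parent": "projects/example-project/locations/global",
                "inspect_config": {"info_types": [{"name": "EMAIL_ADDRESS"}]},
                "item": {"value": "My email is jane.doe@example.com"},
            }
        )
        for finding in response.result.findings:
            print(finding.info_type.name, finding.likelihood)

    asyncio.run(main())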
+ """ + self._client = DlpServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def inspect_content(self, + request: dlp.InspectContentRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectContentResponse: + r"""Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + For how to guides, see + https://cloud.google.com/dlp/docs/inspecting-images and + https://cloud.google.com/dlp/docs/inspecting-text, + + Args: + request (:class:`google.cloud.dlp_v2.types.InspectContentRequest`): + The request object. Request to search for potentially + sensitive info in a ContentItem. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectContentResponse: + Results of inspecting an item. + """ + # Create or coerce a protobuf request object. + request = dlp.InspectContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.inspect_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def redact_image(self, + request: dlp.RedactImageRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.RedactImageResponse: + r"""Redacts potentially sensitive info from an image. + This method has limits on input size, processing time, + and output size. See + https://cloud.google.com/dlp/docs/redacting-sensitive- + data-images to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Args: + request (:class:`google.cloud.dlp_v2.types.RedactImageRequest`): + The request object. Request to search for potentially + sensitive info in an image and redact it by covering it + with a colored rectangle. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + google.cloud.dlp_v2.types.RedactImageResponse: + Results of redacting an image. + """ + # Create or coerce a protobuf request object. + request = dlp.RedactImageRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.redact_image, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def deidentify_content(self, + request: dlp.DeidentifyContentRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyContentResponse: + r"""De-identifies potentially sensitive info from a + ContentItem. This method has limits on input size and + output size. See + https://cloud.google.com/dlp/docs/deidentify-sensitive- + data to learn more. + + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. + + Args: + request (:class:`google.cloud.dlp_v2.types.DeidentifyContentRequest`): + The request object. Request to de-identify a list of + items. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyContentResponse: + Results of de-identifying a + ContentItem. + + """ + # Create or coerce a protobuf request object. + request = dlp.DeidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.deidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def reidentify_content(self, + request: dlp.ReidentifyContentRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ReidentifyContentResponse: + r"""Re-identifies content that has been de-identified. 
See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.ReidentifyContentRequest`): + The request object. Request to re-identify an item. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ReidentifyContentResponse: + Results of re-identifying an item. + """ + # Create or coerce a protobuf request object. + request = dlp.ReidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.reidentify_content, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_info_types(self, + request: dlp.ListInfoTypesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that the DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.ListInfoTypesRequest`): + The request object. Request for the list of infoTypes. + parent (:class:`str`): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling.
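Because ``retry`` and ``timeout`` are plain per-call parameters, the generated defaults can be overridden at the call site. A minimal sketch, assuming this runs inside a coroutine with an already-constructed ``client``:

.. code-block:: python

    from google.api_core import exceptions as core_exceptions
    from google.api_core import retry as retries

    # Retry only on ServiceUnavailable, with a tighter overall deadline
    # than the generated 300-second default.
    custom_retry = retries.Retry(
        initial=0.2,
        maximum=30.0,
        multiplier=2.0,
        predicate=retries.if_exception_type(core_exceptions.ServiceUnavailable),
        deadline=60.0,
    )

    response = await client.list_info_types(
        parent="locations/global",
        retry=custom_retry,
        timeout=60.0,
    )
    for info_type in response.info_types:
        print(info_type.name, "-", info_type.display_name)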
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def create_inspect_template(self, + request: dlp.CreateInspectTemplateRequest = None, + *, + parent: str = None, + inspect_template: dlp.InspectTemplate = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Creates an InspectTemplate for re-using frequently + used configuration for inspecting content, images, and + storage. See https://cloud.google.com/dlp/docs/creating- + templates to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.CreateInspectTemplateRequest`): + The request object. Request message for + CreateInspectTemplate. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location <https://cloud.google.com/dlp/docs/specifying-location>`__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): + Required. The InspectTemplate to + create. + + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request.
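The flattened parameters map one-to-one onto the request fields, so a template can be created without building the request message by hand. A hedged sketch (coroutine context, ``example-project``, and the template contents are all assumptions):

.. code-block:: python

    from google.cloud import dlp_v2

    template = await client.create_inspect_template(
        parent="projects/example-project/locations/global",
        inspect_template=dlp_v2.InspectTemplate(
            display_name="email-only",
            inspect_config=dlp_v2.InspectConfig(
                info_types=[dlp_v2.InfoType(name="EMAIL_ADDRESS")],
            ),
        ),
    )
    print(template.name)  # server-assigned resource name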
+ has_flattened_params = any([parent, inspect_template]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if inspect_template is not None: + request.inspect_template = inspect_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_inspect_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_inspect_template(self, + request: dlp.UpdateInspectTemplateRequest = None, + *, + name: str = None, + inspect_template: dlp.InspectTemplate = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.UpdateInspectTemplateRequest`): + The request object. Request message for + UpdateInspectTemplate. + name (:class:`str`): + Required. Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name, inspect_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_inspect_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_inspect_template(self, + request: dlp.GetInspectTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.GetInspectTemplateRequest`): + The request object. Request message for + GetInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_inspect_templates(self, + request: dlp.ListInspectTemplatesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListInspectTemplatesAsyncPager: + r"""Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.ListInspectTemplatesRequest`): + The request object. Request message for + ListInspectTemplates. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location <https://cloud.google.com/dlp/docs/specifying-location>`__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager: + Response message for + ListInspectTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListInspectTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_inspect_templates, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListInspectTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_inspect_template(self, + request: dlp.DeleteInspectTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.DeleteInspectTemplateRequest`): + The request object. Request message for + DeleteInspectTemplate. + name (:class:`str`): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteInspectTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_inspect_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
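Since the returned pager implements ``__aiter__``, page resolution is transparent to the caller. A minimal iteration sketch (``client`` and the parent string are assumptions):

.. code-block:: python

    pager = await client.list_inspect_templates(
        parent="projects/example-project/locations/global",
    )
    async for template in pager:  # later pages are fetched on demand
        print(template.name, template.display_name)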
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_deidentify_template(self, + request: dlp.CreateDeidentifyTemplateRequest = None, + *, + parent: str = None, + deidentify_template: dlp.DeidentifyTemplate = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for re-using frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates- + deid to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest`): + The request object. Request message for + CreateDeidentifyTemplate. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location <https://cloud.google.com/dlp/docs/specifying-location>`__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): + Required. The DeidentifyTemplate to + create. + + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, deidentify_template]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deidentify_template is not None: + request.deidentify_template = deidentify_template + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_deidentify_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_deidentify_template(self, + request: dlp.UpdateDeidentifyTemplateRequest = None, + *, + name: str = None, + deidentify_template: dlp.DeidentifyTemplate = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest`): + The request object. Request message for + UpdateDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, deidentify_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_deidentify_template, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_deidentify_template(self, + request: dlp.GetDeidentifyTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest`): + The request object. Request message for + GetDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
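The ``update_mask`` parameter is a standard protobuf ``FieldMask``; only the listed paths are touched by the update. A sketch restricting an update to ``display_name`` (the resource name and new value are illustrative):

.. code-block:: python

    from google.protobuf import field_mask_pb2

    from google.cloud import dlp_v2

    updated = await client.update_deidentify_template(
        name="projects/example-project/deidentifyTemplates/432452342",
        deidentify_template=dlp_v2.DeidentifyTemplate(display_name="renamed"),
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )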
+ return response + + async def list_deidentify_templates(self, + request: dlp.ListDeidentifyTemplatesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeidentifyTemplatesAsyncPager: + r"""Lists DeidentifyTemplates. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest`): + The request object. Request message for + ListDeidentifyTemplates. + parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on the scope + of the request (project or organization) and whether you + have `specified a processing + location <https://cloud.google.com/dlp/docs/specifying-location>`__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults + to global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager: + Response message for + ListDeidentifyTemplates. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListDeidentifyTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_deidentify_templates, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request.
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeidentifyTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_deidentify_template(self, + request: dlp.DeleteDeidentifyTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest`): + The request object. Request message for + DeleteDeidentifyTemplate. + name (:class:`str`): + Required. Resource name of the organization and + deidentify template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteDeidentifyTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_deidentify_template, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_job_trigger(self, + request: dlp.CreateJobTriggerRequest = None, + *, + parent: str = None, + job_trigger: dlp.JobTrigger = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.CreateJobTriggerRequest`): + The request object. Request message for + CreateJobTrigger. 
+ parent (:class:`str`): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location <https://cloud.google.com/dlp/docs/specifying-location>`__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a + parent project with the identifier ``example-project``, + and specifies the ``europe-west3`` location for + processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): + Required. The JobTrigger to create. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts- + job-triggers to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_trigger]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_job_trigger(self, + request: dlp.UpdateJobTriggerRequest = None, + *, + name: str = None, + job_trigger: dlp.JobTrigger = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.UpdateJobTriggerRequest`): + The request object. Request message for + UpdateJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``.
+ + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): + New JobTrigger value. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts- + job-triggers to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, job_trigger, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if job_trigger is not None: + request.job_trigger = job_trigger + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def hybrid_inspect_job_trigger(self, + request: dlp.HybridInspectJobTriggerRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Args: + request (:class:`google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest`): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.HybridInspectJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.hybrid_inspect_job_trigger, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job_trigger(self, + request: dlp.GetJobTriggerRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.GetJobTriggerRequest`): + The request object. Request message for GetJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts- + job-triggers to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
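A hedged sketch of creating a daily Cloud Storage scan trigger with the flattened parameters (the bucket, project, and schedule are assumptions, not values from this change):

.. code-block:: python

    from google.protobuf import duration_pb2

    from google.cloud import dlp_v2

    trigger = await client.create_job_trigger(
        parent="projects/example-project",
        job_trigger=dlp_v2.JobTrigger(
            inspect_job=dlp_v2.InspectJobConfig(
                storage_config=dlp_v2.StorageConfig(
                    cloud_storage_options=dlp_v2.CloudStorageOptions(
                        file_set=dlp_v2.CloudStorageOptions.FileSet(
                            url="gs://example-bucket/**",
                        ),
                    ),
                ),
                inspect_config=dlp_v2.InspectConfig(
                    info_types=[dlp_v2.InfoType(name="EMAIL_ADDRESS")],
                ),
            ),
            triggers=[
                dlp_v2.JobTrigger.Trigger(
                    schedule=dlp_v2.Schedule(
                        recurrence_period_duration=duration_pb2.Duration(
                            seconds=86400,  # once a day
                        ),
                    ),
                ),
            ],
            status=dlp_v2.JobTrigger.Status.HEALTHY,
        ),
    )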
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_job_trigger,
+            default_retry=retries.Retry(
+initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
+                core_exceptions.DeadlineExceeded,
+                core_exceptions.ServiceUnavailable,
+            ),
+            deadline=300.0,
+            ),
+            default_timeout=300.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_job_triggers(self,
+            request: dlp.ListJobTriggersRequest = None,
+            *,
+            parent: str = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListJobTriggersAsyncPager:
+        r"""Lists job triggers.
+        See https://cloud.google.com/dlp/docs/creating-job-
+        triggers to learn more.
+
+        Args:
+            request (:class:`google.cloud.dlp_v2.types.ListJobTriggersRequest`):
+                The request object. Request message for ListJobTriggers.
+            parent (:class:`str`):
+                Required. Parent resource name.
+
+                The format of this value varies depending on whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                - Projects scope, location specified:
+                  ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                - Projects scope, no location specified (defaults to
+                  global): ``projects/``\ PROJECT_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager:
+                Response message for ListJobTriggers.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = dlp.ListJobTriggersRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
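+        # A caller-supplied `retry` argument (passed through to the call
+        # below) overrides this default policy on a per-request basis.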
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_job_triggers, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobTriggersAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_job_trigger(self, + request: dlp.DeleteJobTriggerRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.DeleteJobTriggerRequest`): + The request object. Request message for + DeleteJobTrigger. + name (:class:`str`): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteJobTriggerRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job_trigger, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
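+        # DeleteJobTrigger returns google.protobuf.Empty, so the result of
+        # the call is intentionally discarded and this method returns None.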
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def activate_job_trigger(self,
+            request: dlp.ActivateJobTriggerRequest = None,
+            *,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.DlpJob:
+        r"""Activate a job trigger. Causes the immediate execution
+        of a trigger instead of waiting on the trigger event to
+        occur.
+
+        Args:
+            request (:class:`google.cloud.dlp_v2.types.ActivateJobTriggerRequest`):
+                The request object. Request message for
+                ActivateJobTrigger.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.DlpJob:
+                Combines all of the information about
+                a DLP job.
+
+        """
+        # Create or coerce a protobuf request object.
+        request = dlp.ActivateJobTriggerRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.activate_job_trigger,
+            default_timeout=300.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def create_dlp_job(self,
+            request: dlp.CreateDlpJobRequest = None,
+            *,
+            parent: str = None,
+            inspect_job: dlp.InspectJobConfig = None,
+            risk_job: dlp.RiskAnalysisJobConfig = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.DlpJob:
+        r"""Creates a new job to inspect storage or calculate
+        risk metrics. See
+        https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis
+        to learn more.
+        When no InfoTypes or CustomInfoTypes are specified in
+        inspect jobs, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Args:
+            request (:class:`google.cloud.dlp_v2.types.CreateDlpJobRequest`):
+                The request object. Request message for
+                CreateDlpJobRequest. Used to initiate long running jobs
+                such as calculating risk metrics or inspecting Google
+                Cloud Storage.
+            parent (:class:`str`):
+                Required. Parent resource name.
+
+                The format of this value varies depending on whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                - Projects scope, location specified:
+                  ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                - Projects scope, no location specified (defaults to
+                  global): ``projects/``\ PROJECT_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            inspect_job (:class:`google.cloud.dlp_v2.types.InspectJobConfig`):
+                Set to control what and how to
+                inspect.
+
+                This corresponds to the ``inspect_job`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            risk_job (:class:`google.cloud.dlp_v2.types.RiskAnalysisJobConfig`):
+                Set to choose what metric to
+                calculate.
+
+                This corresponds to the ``risk_job`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.DlpJob:
+                Combines all of the information about
+                a DLP job.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, inspect_job, risk_job])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = dlp.CreateDlpJobRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if inspect_job is not None:
+            request.inspect_job = inspect_job
+        if risk_job is not None:
+            request.risk_job = risk_job
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.create_dlp_job,
+            default_timeout=300.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_dlp_jobs(self,
+            request: dlp.ListDlpJobsRequest = None,
+            *,
+            parent: str = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListDlpJobsAsyncPager:
+        r"""Lists DlpJobs that match the specified filter in the
+        request. See
+        https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis
+        to learn more.
+
+        Args:
+            request (:class:`google.cloud.dlp_v2.types.ListDlpJobsRequest`):
+                The request object. The request message for listing DLP
+                jobs.
+            parent (:class:`str`):
+                Required. Parent resource name.
+
+                The format of this value varies depending on whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                - Projects scope, location specified:
+                  ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                - Projects scope, no location specified (defaults to
+                  global): ``projects/``\ PROJECT_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager: + The response message for listing DLP + jobs. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListDlpJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_dlp_jobs, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDlpJobsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_dlp_job(self, + request: dlp.GetDlpJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.GetDlpJobRequest`): + The request object. The request message for + [DlpJobs.GetDlpJob][]. + name (:class:`str`): + Required. The name of the DlpJob + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
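+        # The check below treats the falsy defaults (None) as "not set", so
+        # it only fires when the caller actually supplied a flattened field.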
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.GetDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_dlp_job(self, + request: dlp.DeleteDlpJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be cancelled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.DeleteDlpJobRequest`): + The request object. The request message for deleting a + DLP job. + name (:class:`str`): + Required. The name of the DlpJob + resource to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_dlp_job, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
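+        # to_grpc_metadata() renders these pairs into the
+        # x-goog-request-params header, which the backend uses to route the
+        # request to the right resource.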
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def cancel_dlp_job(self,
+            request: dlp.CancelDlpJobRequest = None,
+            *,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Starts asynchronous cancellation on a long-running
+        DlpJob. The server makes a best effort to cancel the
+        DlpJob, but success is not guaranteed.
+        See https://cloud.google.com/dlp/docs/inspecting-storage
+        and https://cloud.google.com/dlp/docs/compute-risk-
+        analysis to learn more.
+
+        Args:
+            request (:class:`google.cloud.dlp_v2.types.CancelDlpJobRequest`):
+                The request object. The request message for canceling a
+                DLP job.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        request = dlp.CancelDlpJobRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.cancel_dlp_job,
+            default_timeout=300.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    async def create_stored_info_type(self,
+            request: dlp.CreateStoredInfoTypeRequest = None,
+            *,
+            parent: str = None,
+            config: dlp.StoredInfoTypeConfig = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.StoredInfoType:
+        r"""Creates a pre-built stored infoType to be used for
+        inspection. See
+        https://cloud.google.com/dlp/docs/creating-stored-
+        infotypes to learn more.
+
+        Args:
+            request (:class:`google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest`):
+                The request object. Request message for
+                CreateStoredInfoType.
+            parent (:class:`str`):
+                Required. Parent resource name.
+
+                The format of this value varies depending on the scope
+                of the request (project or organization) and whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                - Projects scope, location specified:
+                  ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                - Projects scope, no location specified (defaults to
+                  global): ``projects/``\ PROJECT_ID
+                - Organizations scope, location specified:
+                  ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+                - Organizations scope, no location specified (defaults
+                  to global): ``organizations/``\ ORG_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`):
+                Required. Configuration of the
+                storedInfoType to create.
+ + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, config]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.CreateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if config is not None: + request.config = config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_stored_info_type, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_stored_info_type(self, + request: dlp.UpdateStoredInfoTypeRequest = None, + *, + name: str = None, + config: dlp.StoredInfoTypeConfig = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest`): + The request object. Request message for + UpdateStoredInfoType. + name (:class:`str`): + Required. Resource name of organization and + storedInfoType to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): + Updated configuration for the + storedInfoType. If not provided, a new + version of the storedInfoType will be + created with the existing configuration. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.UpdateStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if config is not None: + request.config = config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_stored_info_type, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_stored_info_type(self, + request: dlp.GetStoredInfoTypeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Gets a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.GetStoredInfoTypeRequest`): + The request object. Request message for + GetStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
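+        # Supplying both a fully-formed `request` and flattened fields is
+        # ambiguous, so the combination is rejected rather than merged.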
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError("If the `request` argument is set, then none of "
+                             "the individual field arguments should be set.")
+
+        request = dlp.GetStoredInfoTypeRequest(request)
+
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = gapic_v1.method_async.wrap_method(
+            self._client._transport.get_stored_info_type,
+            default_retry=retries.Retry(
+initial=0.1,maximum=60.0,multiplier=1.3,                predicate=retries.if_exception_type(
+                core_exceptions.DeadlineExceeded,
+                core_exceptions.ServiceUnavailable,
+            ),
+            deadline=300.0,
+            ),
+            default_timeout=300.0,
+            client_info=DEFAULT_CLIENT_INFO,
+        )
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        response = await rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    async def list_stored_info_types(self,
+            request: dlp.ListStoredInfoTypesRequest = None,
+            *,
+            parent: str = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListStoredInfoTypesAsyncPager:
+        r"""Lists stored infoTypes.
+        See https://cloud.google.com/dlp/docs/creating-stored-
+        infotypes to learn more.
+
+        Args:
+            request (:class:`google.cloud.dlp_v2.types.ListStoredInfoTypesRequest`):
+                The request object. Request message for
+                ListStoredInfoTypes.
+            parent (:class:`str`):
+                Required. Parent resource name.
+
+                The format of this value varies depending on the scope
+                of the request (project or organization) and whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                - Projects scope, location specified:
+                  ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                - Projects scope, no location specified (defaults to
+                  global): ``projects/``\ PROJECT_ID
+                - Organizations scope, location specified:
+                  ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+                - Organizations scope, no location specified (defaults
+                  to global): ``organizations/``\ ORG_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager:
+                Response message for
+                ListStoredInfoTypes.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
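+        # As elsewhere in this client, `parent` is the only field of this
+        # request that the generator exposes as a flattened argument.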
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.ListStoredInfoTypesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_stored_info_types, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListStoredInfoTypesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_stored_info_type(self, + request: dlp.DeleteStoredInfoTypeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (:class:`google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest`): + The request object. Request message for + DeleteStoredInfoType. + name (:class:`str`): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.DeleteStoredInfoTypeRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
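+        # Deletion is configured like the read methods above: transient
+        # DeadlineExceeded and ServiceUnavailable errors are retried by
+        # default.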
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_stored_info_type, + default_retry=retries.Retry( +initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( + core_exceptions.DeadlineExceeded, + core_exceptions.ServiceUnavailable, + ), + deadline=300.0, + ), + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def hybrid_inspect_dlp_job(self, + request: dlp.HybridInspectDlpJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a job. + To review the findings inspect the job. Inspection will + occur asynchronously. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Args: + request (:class:`google.cloud.dlp_v2.types.HybridInspectDlpJobRequest`): + The request object. Request to search for potentially + sensitive info in a custom location. + name (:class:`str`): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = dlp.HybridInspectDlpJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def finish_dlp_job(self, + request: dlp.FinishDlpJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. Early access feature is in a pre- + release state and might change or have limited support. + For more information, see + https://cloud.google.com/products#product-launch-stages. + + Args: + request (:class:`google.cloud.dlp_v2.types.FinishDlpJobRequest`): + The request object. The request message for finishing a + DLP hybrid job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + request = dlp.FinishDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.finish_dlp_job, + default_timeout=300.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-dlp", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "DlpServiceAsyncClient", +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py new file mode 100644 index 00000000..288258aa --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py @@ -0,0 +1,3345 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.auth.transport import mtls  # type: ignore
+from google.auth.transport.grpc import SslCredentials  # type: ignore
+from google.auth.exceptions import MutualTLSChannelError  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.dlp_v2.services.dlp_service import pagers
+from google.cloud.dlp_v2.types import dlp
+from google.protobuf import field_mask_pb2  # type: ignore
+from google.protobuf import timestamp_pb2  # type: ignore
+from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import DlpServiceGrpcTransport
+from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport
+
+
+class DlpServiceClientMeta(type):
+    """Metaclass for the DlpService client.
+
+    This provides class-level methods for building and retrieving
+    support objects (e.g. transport) without polluting the client instance
+    objects.
+    """
+    _transport_registry = OrderedDict()  # type: Dict[str, Type[DlpServiceTransport]]
+    _transport_registry["grpc"] = DlpServiceGrpcTransport
+    _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport
+
+    def get_transport_class(cls,
+            label: str = None,
+        ) -> Type[DlpServiceTransport]:
+        """Returns an appropriate transport class.
+
+        Args:
+            label: The name of the desired transport. If none is
+                provided, then the first transport in the registry is used.
+
+        Returns:
+            The transport class to use.
+        """
+        # If a specific transport is requested, return that one.
+        if label:
+            return cls._transport_registry[label]
+
+        # No transport is requested; return the default (that is, the first one
+        # in the dictionary).
+        return next(iter(cls._transport_registry.values()))
+
+
+class DlpServiceClient(metaclass=DlpServiceClientMeta):
+    """The Cloud Data Loss Prevention (DLP) API is a service that
+    allows clients to detect the presence of Personally Identifiable
+    Information (PII) and other privacy-sensitive data in user-
+    supplied, unstructured data streams, like text blocks or images.
+    The service also includes methods for sensitive data redaction
+    and scheduling of data scans on Google Cloud Platform based data
+    sets.
+    To learn more about concepts and find how-to guides see
+    https://cloud.google.com/dlp/docs/.
+    """
+
+    @staticmethod
+    def _get_default_mtls_endpoint(api_endpoint):
+        """Converts api endpoint to mTLS endpoint.
+
+        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+        Args:
+            api_endpoint (Optional[str]): the api endpoint to convert.
+        Returns:
+            str: converted mTLS api endpoint.
+        """
+        if not api_endpoint:
+            return api_endpoint
+
+        mtls_endpoint_re = re.compile(
+            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "dlp.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DlpServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DlpServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DlpServiceTransport: The transport used by the client + instance. 
+        """
+        return self._transport
+
+    @staticmethod
+    def deidentify_template_path(organization: str,deidentify_template: str,) -> str:
+        """Returns a fully-qualified deidentify_template string."""
+        return "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, )
+
+    @staticmethod
+    def parse_deidentify_template_path(path: str) -> Dict[str,str]:
+        """Parses a deidentify_template path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)/deidentifyTemplates/(?P<deidentify_template>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def dlp_content_path(project: str,) -> str:
+        """Returns a fully-qualified dlp_content string."""
+        return "projects/{project}/dlpContent".format(project=project, )
+
+    @staticmethod
+    def parse_dlp_content_path(path: str) -> Dict[str,str]:
+        """Parses a dlp_content path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/dlpContent$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def dlp_job_path(project: str,dlp_job: str,) -> str:
+        """Returns a fully-qualified dlp_job string."""
+        return "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, )
+
+    @staticmethod
+    def parse_dlp_job_path(path: str) -> Dict[str,str]:
+        """Parses a dlp_job path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/dlpJobs/(?P<dlp_job>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def finding_path(project: str,location: str,finding: str,) -> str:
+        """Returns a fully-qualified finding string."""
+        return "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, )
+
+    @staticmethod
+    def parse_finding_path(path: str) -> Dict[str,str]:
+        """Parses a finding path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/findings/(?P<finding>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def inspect_template_path(organization: str,inspect_template: str,) -> str:
+        """Returns a fully-qualified inspect_template string."""
+        return "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, )
+
+    @staticmethod
+    def parse_inspect_template_path(path: str) -> Dict[str,str]:
+        """Parses an inspect_template path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)/inspectTemplates/(?P<inspect_template>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def job_trigger_path(project: str,job_trigger: str,) -> str:
+        """Returns a fully-qualified job_trigger string."""
+        return "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, )
+
+    @staticmethod
+    def parse_job_trigger_path(path: str) -> Dict[str,str]:
+        """Parses a job_trigger path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/jobTriggers/(?P<job_trigger>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def stored_info_type_path(organization: str,stored_info_type: str,) -> str:
+        """Returns a fully-qualified stored_info_type string."""
+        return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, )
+
+    @staticmethod
+    def parse_stored_info_type_path(path: str) -> Dict[str,str]:
+        """Parses a stored_info_type path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)/storedInfoTypes/(?P<stored_info_type>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_billing_account_path(billing_account: str, ) -> str:
+        """Returns a fully-qualified billing_account string."""
+        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+    @staticmethod
+    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+        """Parse a billing_account path into its component segments."""
+        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_folder_path(folder: str, ) -> str:
+        """Returns a fully-qualified folder string."""
+        return "folders/{folder}".format(folder=folder, )
+
+    @staticmethod
+    def parse_common_folder_path(path: str) -> Dict[str,str]:
+        """Parse a folder path into its component segments."""
+        m = re.match(r"^folders/(?P<folder>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_organization_path(organization: str, ) -> str:
+        """Returns a fully-qualified organization string."""
+        return "organizations/{organization}".format(organization=organization, )
+
+    @staticmethod
+    def parse_common_organization_path(path: str) -> Dict[str,str]:
+        """Parse an organization path into its component segments."""
+        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_project_path(project: str, ) -> str:
+        """Returns a fully-qualified project string."""
+        return "projects/{project}".format(project=project, )
+
+    @staticmethod
+    def parse_common_project_path(path: str) -> Dict[str,str]:
+        """Parse a project path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    @staticmethod
+    def common_location_path(project: str, location: str, ) -> str:
+        """Returns a fully-qualified location string."""
+        return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+    @staticmethod
+    def parse_common_location_path(path: str) -> Dict[str,str]:
+        """Parse a location path into its component segments."""
+        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+        return m.groupdict() if m else {}
+
+    def __init__(self, *,
+            credentials: Optional[ga_credentials.Credentials] = None,
+            transport: Union[str, DlpServiceTransport, None] = None,
+            client_options: Optional[client_options_lib.ClientOptions] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            ) -> None:
+        """Instantiates the dlp service client.
+
+        Args:
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            transport (Union[str, DlpServiceTransport]): The
+                transport to use. If set to None, a transport is chosen
+                automatically.
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. It won't take effect if a ``transport`` instance is provided.
+                (1) The ``api_endpoint`` property can be used to override the
+                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+                environment variable can also be used to override the endpoint:
+                "always" (always use the default mTLS endpoint), "never" (always
+                use the default regular endpoint) and "auto" (auto switch to the
+                default mTLS endpoint if client certificate is present, this is
+                the default value).
However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. + use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DlpServiceTransport): + # transport is a DlpServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + ) + + def inspect_content(self, + request: dlp.InspectContentRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectContentResponse: + r"""Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. 
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+        For how-to guides, see
+        https://cloud.google.com/dlp/docs/inspecting-images and
+        https://cloud.google.com/dlp/docs/inspecting-text.
+
+        Args:
+            request (google.cloud.dlp_v2.types.InspectContentRequest):
+                The request object. Request to search for potentially
+                sensitive info in a ContentItem.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.InspectContentResponse:
+                Results of inspecting an item.
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.InspectContentRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.InspectContentRequest):
+            request = dlp.InspectContentRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.inspect_content]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def redact_image(self,
+            request: dlp.RedactImageRequest = None,
+            *,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.RedactImageResponse:
+        r"""Redacts potentially sensitive info from an image.
+        This method has limits on input size, processing time,
+        and output size. See
+        https://cloud.google.com/dlp/docs/redacting-sensitive-
+        data-images to learn more.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Args:
+            request (google.cloud.dlp_v2.types.RedactImageRequest):
+                The request object. Request to search for potentially
+                sensitive info in an image and redact it by covering it
+                with a colored rectangle.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.RedactImageResponse:
+                Results of redacting an image.
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.RedactImageRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.RedactImageRequest):
+            request = dlp.RedactImageRequest(request)
+
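+        # Illustrative sketch (not generator output): "example-project" and
+        # png_bytes are hypothetical, and the nested field names assume the
+        # published dlp_v2 types (ByteContentItem et al.):
+        #
+        #   client = DlpServiceClient()
+        #   response = client.redact_image(request=dlp.RedactImageRequest(
+        #       parent="projects/example-project",
+        #       byte_item=dlp.ByteContentItem(
+        #           type_=dlp.ByteContentItem.BytesType.IMAGE_PNG,
+        #           data=png_bytes)))
+        #   redacted_png = response.redacted_image
+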
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.redact_image]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def deidentify_content(self,
+            request: dlp.DeidentifyContentRequest = None,
+            *,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.DeidentifyContentResponse:
+        r"""De-identifies potentially sensitive info from a
+        ContentItem. This method has limits on input size and
+        output size. See
+        https://cloud.google.com/dlp/docs/deidentify-sensitive-
+        data to learn more.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Args:
+            request (google.cloud.dlp_v2.types.DeidentifyContentRequest):
+                The request object. Request to de-identify a list of
+                items.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.DeidentifyContentResponse:
+                Results of de-identifying a
+                ContentItem.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.DeidentifyContentRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.DeidentifyContentRequest):
+            request = dlp.DeidentifyContentRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.deidentify_content]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def reidentify_content(self,
+            request: dlp.ReidentifyContentRequest = None,
+            *,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.ReidentifyContentResponse:
+        r"""Re-identifies content that has been de-identified. See
+        https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example
+        to learn more.
+
+        Args:
+            request (google.cloud.dlp_v2.types.ReidentifyContentRequest):
+                The request object. Request to re-identify an item.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.ReidentifyContentResponse:
+                Results of re-identifying an item.
+        """
+        # Create or coerce a protobuf request object.
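+        # Illustrative sketch (not generator output): re-identification
+        # presumes a reversible transformation (e.g. crypto-based with a
+        # surrogate infoType); reidentify_config and deidentified_text here
+        # are hypothetical values supplied by the caller:
+        #
+        #   response = client.reidentify_content(request=dlp.ReidentifyContentRequest(
+        #       parent="projects/example-project",
+        #       reidentify_config=reidentify_config,
+        #       item=dlp.ContentItem(value=deidentified_text)))
+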
+ # Minor optimization to avoid making a copy if the user passes + # in a dlp.ReidentifyContentRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ReidentifyContentRequest): + request = dlp.ReidentifyContentRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.reidentify_content] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_info_types(self, + request: dlp.ListInfoTypesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.ListInfoTypesResponse: + r"""Returns a list of the sensitive information types + that the DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Args: + request (google.cloud.dlp_v2.types.ListInfoTypesRequest): + The request object. Request for the list of infoTypes. + parent (str): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.ListInfoTypesResponse: + Response to the ListInfoTypes + request. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListInfoTypesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListInfoTypesRequest): + request = dlp.ListInfoTypesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_info_types] + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
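+        # Illustrative note (editorial, hedged): unlike the other calls in
+        # this client, no routing header is attached above, presumably because
+        # the request's parent is a bare location rather than a project
+        # resource. Hypothetical call:
+        #
+        #   resp = client.list_info_types(parent="locations/europe-west3")
+        #   names = [info_type.name for info_type in resp.info_types]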
+        return response
+
+    def create_inspect_template(self,
+            request: dlp.CreateInspectTemplateRequest = None,
+            *,
+            parent: str = None,
+            inspect_template: dlp.InspectTemplate = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.InspectTemplate:
+        r"""Creates an InspectTemplate for re-using frequently
+        used configuration for inspecting content, images, and
+        storage. See https://cloud.google.com/dlp/docs/creating-
+        templates to learn more.
+
+        Args:
+            request (google.cloud.dlp_v2.types.CreateInspectTemplateRequest):
+                The request object. Request message for
+                CreateInspectTemplate.
+            parent (str):
+                Required. Parent resource name.
+
+                The format of this value varies depending on the scope
+                of the request (project or organization) and whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+                -  Organizations scope, location specified:
+                   ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+                -  Organizations scope, no location specified (defaults
+                   to global): ``organizations/``\ ORG_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            inspect_template (google.cloud.dlp_v2.types.InspectTemplate):
+                Required. The InspectTemplate to
+                create.
+
+                This corresponds to the ``inspect_template`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.InspectTemplate:
+                The inspectTemplate contains a
+                configuration (set of types of sensitive
+                data to be detected) to be used anywhere
+                you otherwise would normally specify
+                InspectConfig. See
+                https://cloud.google.com/dlp/docs/concepts-
+                templates to learn more.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, inspect_template])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.CreateInspectTemplateRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.CreateInspectTemplateRequest):
+            request = dlp.CreateInspectTemplateRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if inspect_template is not None:
+            request.inspect_template = inspect_template
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
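+        # Illustrative sketch (not generator output): a flattened-parameter
+        # call; EMAIL_ADDRESS is a built-in infoType, and the project and
+        # location ids are hypothetical:
+        #
+        #   template = client.create_inspect_template(
+        #       parent="projects/example-project/locations/europe-west3",
+        #       inspect_template=dlp.InspectTemplate(
+        #           inspect_config=dlp.InspectConfig(
+        #               info_types=[dlp.InfoType(name="EMAIL_ADDRESS")])))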
+ rpc = self._transport._wrapped_methods[self._transport.create_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_inspect_template(self, + request: dlp.UpdateInspectTemplateRequest = None, + *, + name: str = None, + inspect_template: dlp.InspectTemplate = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (google.cloud.dlp_v2.types.UpdateInspectTemplateRequest): + The request object. Request message for + UpdateInspectTemplate. + name (str): + Required. Resource name of organization and + inspectTemplate to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + New InspectTemplate value. + This corresponds to the ``inspect_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, inspect_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateInspectTemplateRequest): + request = dlp.UpdateInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
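+        # Illustrative sketch (not generator output): update_mask keeps
+        # partial updates explicit; a hypothetical call that changes only the
+        # display name (field_mask_pb2 is already imported for update_mask):
+        #
+        #   client.update_inspect_template(
+        #       name="organizations/433245324/inspectTemplates/432452342",
+        #       inspect_template=dlp.InspectTemplate(display_name="new-name"),
+        #       update_mask=field_mask_pb2.FieldMask(paths=["display_name"]))
+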
+ if name is not None: + request.name = name + if inspect_template is not None: + request.inspect_template = inspect_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_inspect_template(self, + request: dlp.GetInspectTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.InspectTemplate: + r"""Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (google.cloud.dlp_v2.types.GetInspectTemplateRequest): + The request object. Request message for + GetInspectTemplate. + name (str): + Required. Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.InspectTemplate: + The inspectTemplate contains a + configuration (set of types of sensitive + data to be detected) to be used anywhere + you otherwise would normally specify + InspectConfig. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetInspectTemplateRequest): + request = dlp.GetInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+        return response
+
+    def list_inspect_templates(self,
+            request: dlp.ListInspectTemplatesRequest = None,
+            *,
+            parent: str = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListInspectTemplatesPager:
+        r"""Lists InspectTemplates.
+        See https://cloud.google.com/dlp/docs/creating-templates
+        to learn more.
+
+        Args:
+            request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest):
+                The request object. Request message for
+                ListInspectTemplates.
+            parent (str):
+                Required. Parent resource name.
+
+                The format of this value varies depending on the scope
+                of the request (project or organization) and whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+                -  Organizations scope, location specified:
+                   ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+                -  Organizations scope, no location specified (defaults
+                   to global): ``organizations/``\ ORG_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager:
+                Response message for
+                ListInspectTemplates.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.ListInspectTemplatesRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.ListInspectTemplatesRequest):
+            request = dlp.ListInspectTemplatesRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_inspect_templates]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
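+        # Illustrative sketch (not generator output): the pager hides page
+        # tokens, so a hypothetical caller can iterate every template in a
+        # project directly:
+        #
+        #   for template in client.list_inspect_templates(
+        #           parent="projects/example-project"):
+        #       print(template.name)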
+ response = pagers.ListInspectTemplatesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_inspect_template(self, + request: dlp.DeleteInspectTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Args: + request (google.cloud.dlp_v2.types.DeleteInspectTemplateRequest): + The request object. Request message for + DeleteInspectTemplate. + name (str): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` + or projects/project-id/inspectTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteInspectTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteInspectTemplateRequest): + request = dlp.DeleteInspectTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_inspect_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_deidentify_template(self, + request: dlp.CreateDeidentifyTemplateRequest = None, + *, + parent: str = None, + deidentify_template: dlp.DeidentifyTemplate = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Creates a DeidentifyTemplate for re-using frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates- + deid to learn more. + + Args: + request (google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest): + The request object. Request message for + CreateDeidentifyTemplate. + parent (str): + Required. Parent resource name. 
+
+                The format of this value varies depending on the scope
+                of the request (project or organization) and whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+                -  Organizations scope, location specified:
+                   ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+                -  Organizations scope, no location specified (defaults
+                   to global): ``organizations/``\ ORG_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate):
+                Required. The DeidentifyTemplate to
+                create.
+
+                This corresponds to the ``deidentify_template`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.DeidentifyTemplate:
+                DeidentifyTemplates contains
+                instructions on how to de-identify
+                content. See
+                https://cloud.google.com/dlp/docs/concepts-
+                templates to learn more.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent, deidentify_template])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.CreateDeidentifyTemplateRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.CreateDeidentifyTemplateRequest):
+            request = dlp.CreateDeidentifyTemplateRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+        if deidentify_template is not None:
+            request.deidentify_template = deidentify_template
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_deidentify_template]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
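+        # Illustrative sketch (not generator output): deidentify_config here
+        # is a hypothetical dlp.DeidentifyConfig assembled by the caller:
+        #
+        #   template = client.create_deidentify_template(
+        #       parent="projects/example-project",
+        #       deidentify_template=dlp.DeidentifyTemplate(
+        #           deidentify_config=deidentify_config))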
+ return response + + def update_deidentify_template(self, + request: dlp.UpdateDeidentifyTemplateRequest = None, + *, + name: str = None, + deidentify_template: dlp.DeidentifyTemplate = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Updates the DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest): + The request object. Request message for + UpdateDeidentifyTemplate. + name (str): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + New DeidentifyTemplate value. + This corresponds to the ``deidentify_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, deidentify_template, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): + request = dlp.UpdateDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if deidentify_template is not None: + request.deidentify_template = deidentify_template + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
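+        # Illustrative sketch (not generator output): note the routing header
+        # switches from ("parent", ...) on create to ("name", ...) here,
+        # matching the resource addressed. A hypothetical partial update:
+        #
+        #   client.update_deidentify_template(
+        #       name="organizations/433245324/deidentifyTemplates/432452342",
+        #       deidentify_template=dlp.DeidentifyTemplate(display_name="v2"),
+        #       update_mask=field_mask_pb2.FieldMask(paths=["display_name"]))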
+ return response + + def get_deidentify_template(self, + request: dlp.GetDeidentifyTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DeidentifyTemplate: + r"""Gets a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest): + The request object. Request message for + GetDeidentifyTemplate. + name (str): + Required. Resource name of the organization and + deidentify template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` + or projects/project-id/deidentifyTemplates/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DeidentifyTemplate: + DeidentifyTemplates contains + instructions on how to de-identify + content. See + https://cloud.google.com/dlp/docs/concepts- + templates to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetDeidentifyTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetDeidentifyTemplateRequest): + request = dlp.GetDeidentifyTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deidentify_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_deidentify_templates(self, + request: dlp.ListDeidentifyTemplatesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDeidentifyTemplatesPager: + r"""Lists DeidentifyTemplates. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Args: + request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): + The request object. Request message for + ListDeidentifyTemplates. + parent (str): + Required. Parent resource name. 
+
+                The format of this value varies depending on the scope
+                of the request (project or organization) and whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+                -  Organizations scope, location specified:
+                   ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+                -  Organizations scope, no location specified (defaults
+                   to global): ``organizations/``\ ORG_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager:
+                Response message for
+                ListDeidentifyTemplates.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.ListDeidentifyTemplatesRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.ListDeidentifyTemplatesRequest):
+            request = dlp.ListDeidentifyTemplatesRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_deidentify_templates]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # This method is paged; wrap the response in a pager, which provides
+        # an `__iter__` convenience method.
+        response = pagers.ListDeidentifyTemplatesPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_deidentify_template(self,
+            request: dlp.DeleteDeidentifyTemplateRequest = None,
+            *,
+            name: str = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Deletes a DeidentifyTemplate.
+        See https://cloud.google.com/dlp/docs/creating-
+        templates-deid to learn more.
+
+        Args:
+            request (google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest):
+                The request object. Request message for
+                DeleteDeidentifyTemplate.
+            name (str):
+                Required. Resource name of the organization and
+                deidentify template to be deleted, for example
+                ``organizations/433245324/deidentifyTemplates/432452342``
+                or projects/project-id/deidentifyTemplates/432452342.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.DeleteDeidentifyTemplateRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest):
+            request = dlp.DeleteDeidentifyTemplateRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if name is not None:
+            request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_deidentify_template]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def create_job_trigger(self,
+            request: dlp.CreateJobTriggerRequest = None,
+            *,
+            parent: str = None,
+            job_trigger: dlp.JobTrigger = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.JobTrigger:
+        r"""Creates a job trigger to run DLP actions such as
+        scanning storage for sensitive information on a set
+        schedule. See
+        https://cloud.google.com/dlp/docs/creating-job-triggers
+        to learn more.
+
+        Args:
+            request (google.cloud.dlp_v2.types.CreateJobTriggerRequest):
+                The request object. Request message for
+                CreateJobTrigger.
+            parent (str):
+                Required. Parent resource name.
+
+                The format of this value varies depending on whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            job_trigger (google.cloud.dlp_v2.types.JobTrigger):
+                Required. The JobTrigger to create.
+ This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts- + job-triggers to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_trigger]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateJobTriggerRequest): + request = dlp.CreateJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job_trigger is not None: + request.job_trigger = job_trigger + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_job_trigger(self, + request: dlp.UpdateJobTriggerRequest = None, + *, + name: str = None, + job_trigger: dlp.JobTrigger = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Updates a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Args: + request (google.cloud.dlp_v2.types.UpdateJobTriggerRequest): + The request object. Request message for + UpdateJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + New JobTrigger value. + This corresponds to the ``job_trigger`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts- + job-triggers to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name, job_trigger, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateJobTriggerRequest): + request = dlp.UpdateJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if job_trigger is not None: + request.job_trigger = job_trigger + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def hybrid_inspect_job_trigger(self, + request: dlp.HybridInspectJobTriggerRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Args: + request (google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest): + The request object. Request to search for potentially + sensitive info in a custom location. + name (str): + Required. Resource name of the trigger to execute a + hybrid inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
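+        # Illustrative sketch (not generator output): hybrid inspection is
+        # asynchronous, so the call only enqueues the item and findings are
+        # reviewed on the trigger's jobs; hybrid_content_item is a
+        # hypothetical dlp.HybridContentItem built by the caller:
+        #
+        #   client.hybrid_inspect_job_trigger(request=dlp.HybridInspectJobTriggerRequest(
+        #       name="projects/dlp-test-project/jobTriggers/53234423",
+        #       hybrid_item=hybrid_content_item))
+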
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.HybridInspectJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.HybridInspectJobTriggerRequest): + request = dlp.HybridInspectJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_job_trigger(self, + request: dlp.GetJobTriggerRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.JobTrigger: + r"""Gets a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Args: + request (google.cloud.dlp_v2.types.GetJobTriggerRequest): + The request object. Request message for GetJobTrigger. + name (str): + Required. Resource name of the project and the + triggeredJob, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.JobTrigger: + Contains a configuration to make dlp + api calls on a repeating basis. See + https://cloud.google.com/dlp/docs/concepts- + job-triggers to learn more. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetJobTriggerRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetJobTriggerRequest): + request = dlp.GetJobTriggerRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job_trigger] + + # Certain fields should be provided within the metadata header; + # add these here. 
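+        # Editorial note: to_grpc_metadata() below folds the resource name
+        # into the "x-goog-request-params" header so the backend can route
+        # the call; it is appended to caller-supplied metadata, not
+        # substituted for it.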
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_job_triggers(self,
+            request: dlp.ListJobTriggersRequest = None,
+            *,
+            parent: str = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListJobTriggersPager:
+        r"""Lists job triggers.
+        See https://cloud.google.com/dlp/docs/creating-job-
+        triggers to learn more.
+
+        Args:
+            request (google.cloud.dlp_v2.types.ListJobTriggersRequest):
+                The request object. Request message for ListJobTriggers.
+            parent (str):
+                Required. Parent resource name.
+
+                The format of this value varies depending on whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager:
+                Response message for ListJobTriggers.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.ListJobTriggersRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.ListJobTriggersRequest):
+            request = dlp.ListJobTriggersRequest(request)
+        # If we have keyword arguments corresponding to fields on the
+        # request, apply these.
+        if parent is not None:
+            request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.list_job_triggers]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
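+        # Illustrative sketch (not generator output): ListJobTriggersRequest
+        # also carries filter / order_by fields for server-side narrowing; a
+        # hedged example:
+        #
+        #   request = dlp.ListJobTriggersRequest(
+        #       parent="projects/example-project",
+        #       filter="status=HEALTHY")
+        #   for trigger in client.list_job_triggers(request=request):
+        #       print(trigger.name)
+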
+        response = pagers.ListJobTriggersPager(
+            method=rpc,
+            request=request,
+            response=response,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def delete_job_trigger(self,
+            request: dlp.DeleteJobTriggerRequest = None,
+            *,
+            name: str = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> None:
+        r"""Deletes a job trigger.
+        See https://cloud.google.com/dlp/docs/creating-job-triggers
+        to learn more.
+
+        Args:
+            request (google.cloud.dlp_v2.types.DeleteJobTriggerRequest):
+                The request object. Request message for
+                DeleteJobTrigger.
+            name (str):
+                Required. Resource name of the project and the
+                triggeredJob, for example
+                ``projects/dlp-test-project/jobTriggers/53234423``.
+
+                This corresponds to the ``name`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([name])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.DeleteJobTriggerRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.DeleteJobTriggerRequest):
+            request = dlp.DeleteJobTriggerRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.delete_job_trigger]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def activate_job_trigger(self,
+            request: dlp.ActivateJobTriggerRequest = None,
+            *,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.DlpJob:
+        r"""Activate a job trigger. Causes the immediate execution
+        of a trigger instead of waiting on the trigger event to
+        occur.
+
+        Args:
+            request (google.cloud.dlp_v2.types.ActivateJobTriggerRequest):
+                The request object. Request message for
+                ActivateJobTrigger.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.DlpJob:
+                Combines all of the information about
+                a DLP job.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.ActivateJobTriggerRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.ActivateJobTriggerRequest):
+            request = dlp.ActivateJobTriggerRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.activate_job_trigger]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def create_dlp_job(self,
+            request: dlp.CreateDlpJobRequest = None,
+            *,
+            parent: str = None,
+            inspect_job: dlp.InspectJobConfig = None,
+            risk_job: dlp.RiskAnalysisJobConfig = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.DlpJob:
+        r"""Creates a new job to inspect storage or calculate
+        risk metrics. See
+        https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis
+        to learn more.
+        When no InfoTypes or CustomInfoTypes are specified in
+        inspect jobs, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Args:
+            request (google.cloud.dlp_v2.types.CreateDlpJobRequest):
+                The request object. Request message for
+                CreateDlpJobRequest. Used to initiate long running jobs
+                such as calculating risk metrics or inspecting Google
+                Cloud Storage.
+            parent (str):
+                Required. Parent resource name.
+
+                The format of this value varies depending on whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            inspect_job (google.cloud.dlp_v2.types.InspectJobConfig):
+                Set to control what and how to
+                inspect.
+
+                This corresponds to the ``inspect_job`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig):
+                Set to choose what metric to
+                calculate.
+
+                This corresponds to the ``risk_job`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.DlpJob:
+                Combines all of the information about
+                a DLP job.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
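+        # For instance (hypothetical values), a flattened call such as
+        #
+        #   client.create_dlp_job(
+        #       parent="projects/my-project/locations/global",
+        #       inspect_job=dlp_v2.InspectJobConfig(...),
+        #   )
+        #
+        # must not also pass a `request` object.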
+        has_flattened_params = any([parent, inspect_job, risk_job])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.CreateDlpJobRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.CreateDlpJobRequest):
+            request = dlp.CreateDlpJobRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+            if inspect_job is not None:
+                request.inspect_job = inspect_job
+            if risk_job is not None:
+                request.risk_job = risk_job
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.create_dlp_job]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("parent", request.parent),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_dlp_jobs(self,
+            request: dlp.ListDlpJobsRequest = None,
+            *,
+            parent: str = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListDlpJobsPager:
+        r"""Lists DlpJobs that match the specified filter in the
+        request. See
+        https://cloud.google.com/dlp/docs/inspecting-storage and
+        https://cloud.google.com/dlp/docs/compute-risk-analysis
+        to learn more.
+
+        Args:
+            request (google.cloud.dlp_v2.types.ListDlpJobsRequest):
+                The request object. The request message for listing DLP
+                jobs.
+            parent (str):
+                Required. Parent resource name.
+
+                The format of this value varies depending on whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager:
+                The response message for listing DLP
+                jobs.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
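+        # Filtering is carried on the request itself; assuming the filter
+        # grammar documented for ListDlpJobsRequest, a caller could narrow
+        # results with, e.g.:
+        #
+        #   request.filter = "state = RUNNING"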
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.ListDlpJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.ListDlpJobsRequest): + request = dlp.ListDlpJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_dlp_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDlpJobsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_dlp_job(self, + request: dlp.GetDlpJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.DlpJob: + r"""Gets the latest state of a long-running DlpJob. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Args: + request (google.cloud.dlp_v2.types.GetDlpJobRequest): + The request object. The request message for + [DlpJobs.GetDlpJob][]. + name (str): + Required. The name of the DlpJob + resource. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.DlpJob: + Combines all of the information about + a DLP job. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetDlpJobRequest): + request = dlp.GetDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
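+        # (A per-call `retry=` or `timeout=` argument overrides the
+        # default policy that the wrapped method carries.)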
+ rpc = self._transport._wrapped_methods[self._transport.get_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_dlp_job(self, + request: dlp.DeleteDlpJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be cancelled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Args: + request (google.cloud.dlp_v2.types.DeleteDlpJobRequest): + The request object. The request message for deleting a + DLP job. + name (str): + Required. The name of the DlpJob + resource to be deleted. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteDlpJobRequest): + request = dlp.DeleteDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_dlp_job(self, + request: dlp.CancelDlpJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Args: + request (google.cloud.dlp_v2.types.CancelDlpJobRequest): + The request object. The request message for canceling a + DLP job. 
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+        """
+        # Create or coerce a protobuf request object.
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.CancelDlpJobRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.CancelDlpJobRequest):
+            request = dlp.CancelDlpJobRequest(request)
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.cancel_dlp_job]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+    def create_stored_info_type(self,
+            request: dlp.CreateStoredInfoTypeRequest = None,
+            *,
+            parent: str = None,
+            config: dlp.StoredInfoTypeConfig = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> dlp.StoredInfoType:
+        r"""Creates a pre-built stored infoType to be used for
+        inspection. See
+        https://cloud.google.com/dlp/docs/creating-stored-infotypes
+        to learn more.
+
+        Args:
+            request (google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest):
+                The request object. Request message for
+                CreateStoredInfoType.
+            parent (str):
+                Required. Parent resource name.
+
+                The format of this value varies depending on the scope
+                of the request (project or organization) and whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+                -  Organizations scope, location specified:
+                   ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+                -  Organizations scope, no location specified (defaults
+                   to global): ``organizations/``\ ORG_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            config (google.cloud.dlp_v2.types.StoredInfoTypeConfig):
+                Required. Configuration of the
+                storedInfoType to create.
+
+                This corresponds to the ``config`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.types.StoredInfoType:
+                StoredInfoType resource message that
+                contains information about the current
+                version and any pending updates.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
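+        # A minimal config sketch (hypothetical names), assuming the
+        # large-custom-dictionary variant of StoredInfoTypeConfig:
+        #
+        #   config = dlp_v2.StoredInfoTypeConfig(
+        #       display_name="Customer IDs",
+        #       large_custom_dictionary=dlp_v2.LargeCustomDictionaryConfig(...),
+        #   )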
+ has_flattened_params = any([parent, config]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.CreateStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.CreateStoredInfoTypeRequest): + request = dlp.CreateStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if config is not None: + request.config = config + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_stored_info_type(self, + request: dlp.UpdateStoredInfoTypeRequest = None, + *, + name: str = None, + config: dlp.StoredInfoTypeConfig = None, + update_mask: field_mask_pb2.FieldMask = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest): + The request object. Request message for + UpdateStoredInfoType. + name (str): + Required. Resource name of organization and + storedInfoType to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Updated configuration for the + storedInfoType. If not provided, a new + version of the storedInfoType will be + created with the existing configuration. + + This corresponds to the ``config`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get + updated. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
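+        # A hedged example of scoping the update with a field mask (the
+        # path shown is an assumption, not verified against the proto):
+        #
+        #   from google.protobuf import field_mask_pb2
+        #   update_mask = field_mask_pb2.FieldMask(paths=["large_custom_dictionary"])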
+ has_flattened_params = any([name, config, update_mask]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.UpdateStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): + request = dlp.UpdateStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if config is not None: + request.config = config + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_stored_info_type(self, + request: dlp.GetStoredInfoTypeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.StoredInfoType: + r"""Gets a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (google.cloud.dlp_v2.types.GetStoredInfoTypeRequest): + The request object. Request message for + GetStoredInfoType. + name (str): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.StoredInfoType: + StoredInfoType resource message that + contains information about the current + version and any pending updates. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.GetStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.GetStoredInfoTypeRequest): + request = dlp.GetStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
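+            # (Because `request` was just constructed above, setting the
+            # flattened value mutates only this local copy, never the
+            # caller's object.)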
+            if name is not None:
+                request.name = name
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+        rpc = self._transport._wrapped_methods[self._transport.get_stored_info_type]
+
+        # Certain fields should be provided within the metadata header;
+        # add these here.
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ("name", request.name),
+            )),
+        )
+
+        # Send the request.
+        response = rpc(
+            request,
+            retry=retry,
+            timeout=timeout,
+            metadata=metadata,
+        )
+
+        # Done; return the response.
+        return response
+
+    def list_stored_info_types(self,
+            request: dlp.ListStoredInfoTypesRequest = None,
+            *,
+            parent: str = None,
+            retry: retries.Retry = gapic_v1.method.DEFAULT,
+            timeout: float = None,
+            metadata: Sequence[Tuple[str, str]] = (),
+            ) -> pagers.ListStoredInfoTypesPager:
+        r"""Lists stored infoTypes.
+        See https://cloud.google.com/dlp/docs/creating-stored-infotypes
+        to learn more.
+
+        Args:
+            request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest):
+                The request object. Request message for
+                ListStoredInfoTypes.
+            parent (str):
+                Required. Parent resource name.
+
+                The format of this value varies depending on the scope
+                of the request (project or organization) and whether you
+                have `specified a processing
+                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+                -  Projects scope, location specified:
+                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+                -  Projects scope, no location specified (defaults to
+                   global): ``projects/``\ PROJECT_ID
+                -  Organizations scope, location specified:
+                   ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+                -  Organizations scope, no location specified (defaults
+                   to global): ``organizations/``\ ORG_ID
+
+                The following example ``parent`` string specifies a
+                parent project with the identifier ``example-project``,
+                and specifies the ``europe-west3`` location for
+                processing data:
+
+                ::
+
+                    parent=projects/example-project/locations/europe-west3
+
+                This corresponds to the ``parent`` field
+                on the ``request`` instance; if ``request`` is provided, this
+                should not be set.
+            retry (google.api_core.retry.Retry): Designation of what errors, if any,
+                should be retried.
+            timeout (float): The timeout for this request.
+            metadata (Sequence[Tuple[str, str]]): Strings which should be
+                sent along with the request as metadata.
+
+        Returns:
+            google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager:
+                Response message for
+                ListStoredInfoTypes.
+                Iterating over this object will yield
+                results and resolve additional pages
+                automatically.
+
+        """
+        # Create or coerce a protobuf request object.
+        # Sanity check: If we got a request object, we should *not* have
+        # gotten any keyword arguments that map to the request.
+        has_flattened_params = any([parent])
+        if request is not None and has_flattened_params:
+            raise ValueError('If the `request` argument is set, then none of '
+                             'the individual field arguments should be set.')
+
+        # Minor optimization to avoid making a copy if the user passes
+        # in a dlp.ListStoredInfoTypesRequest.
+        # There's no risk of modifying the input as we've already verified
+        # there are no flattened fields.
+        if not isinstance(request, dlp.ListStoredInfoTypesRequest):
+            request = dlp.ListStoredInfoTypesRequest(request)
+            # If we have keyword arguments corresponding to fields on the
+            # request, apply these.
+            if parent is not None:
+                request.parent = parent
+
+        # Wrap the RPC method; this adds retry and timeout information,
+        # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_stored_info_types] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListStoredInfoTypesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_stored_info_type(self, + request: dlp.DeleteStoredInfoTypeRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Args: + request (google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest): + The request object. Request message for + DeleteStoredInfoType. + name (str): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.DeleteStoredInfoTypeRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): + request = dlp.DeleteStoredInfoTypeRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_stored_info_type] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def hybrid_inspect_dlp_job(self, + request: dlp.HybridInspectDlpJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dlp.HybridInspectResponse: + r"""Inspect hybrid content and store findings to a job. + To review the findings inspect the job. Inspection will + occur asynchronously. 
+ Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Args: + request (google.cloud.dlp_v2.types.HybridInspectDlpJobRequest): + The request object. Request to search for potentially + sensitive info in a custom location. + name (str): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.dlp_v2.types.HybridInspectResponse: + Quota exceeded errors will be thrown + once quota has been met. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a dlp.HybridInspectDlpJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.HybridInspectDlpJobRequest): + request = dlp.HybridInspectDlpJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def finish_dlp_job(self, + request: dlp.FinishDlpJobRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. Early access feature is in a pre- + release state and might change or have limited support. + For more information, see + https://cloud.google.com/products#product-launch-stages. + + Args: + request (google.cloud.dlp_v2.types.FinishDlpJobRequest): + The request object. The request message for finishing a + DLP hybrid job. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Minor optimization to avoid making a copy if the user passes + # in a dlp.FinishDlpJobRequest. 
+ # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, dlp.FinishDlpJobRequest): + request = dlp.FinishDlpJobRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.finish_dlp_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-dlp", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "DlpServiceClient", +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py new file mode 100644 index 00000000..51cc2937 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py @@ -0,0 +1,628 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.dlp_v2.types import dlp + + +class ListInspectTemplatesPager: + """A pager for iterating through ``list_inspect_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``inspect_templates`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListInspectTemplates`` requests and continue to iterate + through the ``inspect_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListInspectTemplatesResponse], + request: dlp.ListInspectTemplatesRequest, + response: dlp.ListInspectTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = dlp.ListInspectTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[dlp.ListInspectTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dlp.InspectTemplate]: + for page in self.pages: + yield from page.inspect_templates + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListInspectTemplatesAsyncPager: + """A pager for iterating through ``list_inspect_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``inspect_templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListInspectTemplates`` requests and continue to iterate + through the ``inspect_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListInspectTemplatesResponse]], + request: dlp.ListInspectTemplatesRequest, + response: dlp.ListInspectTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListInspectTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[dlp.ListInspectTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dlp.InspectTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.inspect_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDeidentifyTemplatesPager: + """A pager for iterating through ``list_deidentify_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deidentify_templates`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListDeidentifyTemplates`` requests and continue to iterate + through the ``deidentify_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListDeidentifyTemplatesResponse], + request: dlp.ListDeidentifyTemplatesRequest, + response: dlp.ListDeidentifyTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dlp.DeidentifyTemplate]: + for page in self.pages: + yield from page.deidentify_templates + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDeidentifyTemplatesAsyncPager: + """A pager for iterating through ``list_deidentify_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deidentify_templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeidentifyTemplates`` requests and continue to iterate + through the ``deidentify_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListDeidentifyTemplatesResponse]], + request: dlp.ListDeidentifyTemplatesRequest, + response: dlp.ListDeidentifyTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = dlp.ListDeidentifyTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[dlp.ListDeidentifyTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dlp.DeidentifyTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.deidentify_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTriggersPager: + """A pager for iterating through ``list_job_triggers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and + provides an ``__iter__`` method to iterate through its + ``job_triggers`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListJobTriggersResponse], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListJobTriggersRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListJobTriggersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dlp.JobTrigger]: + for page in self.pages: + yield from page.job_triggers + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTriggersAsyncPager: + """A pager for iterating through ``list_job_triggers`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``job_triggers`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobTriggers`` requests and continue to iterate + through the ``job_triggers`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListJobTriggersResponse]], + request: dlp.ListJobTriggersRequest, + response: dlp.ListJobTriggersResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListJobTriggersRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListJobTriggersResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListJobTriggersRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[dlp.ListJobTriggersResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dlp.JobTrigger]: + async def async_generator(): + async for page in self.pages: + for response in page.job_triggers: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDlpJobsPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListDlpJobsResponse], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDlpJobsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDlpJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dlp.DlpJob]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListDlpJobsAsyncPager: + """A pager for iterating through ``list_dlp_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDlpJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListDlpJobsResponse]], + request: dlp.ListDlpJobsRequest, + response: dlp.ListDlpJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListDlpJobsRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListDlpJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListDlpJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[dlp.ListDlpJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dlp.DlpJob]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``stored_info_types`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., dlp.ListStoredInfoTypesResponse], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dlp.StoredInfoType]: + for page in self.pages: + yield from page.stored_info_types + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListStoredInfoTypesAsyncPager: + """A pager for iterating through ``list_stored_info_types`` requests. + + This class thinly wraps an initial + :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``stored_info_types`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListStoredInfoTypes`` requests and continue to iterate + through the ``stored_info_types`` field on the + corresponding responses. + + All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[dlp.ListStoredInfoTypesResponse]], + request: dlp.ListStoredInfoTypesRequest, + response: dlp.ListStoredInfoTypesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): + The initial request object. + response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = dlp.ListStoredInfoTypesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[dlp.ListStoredInfoTypesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dlp.StoredInfoType]: + async def async_generator(): + async for page in self.pages: + for response in page.stored_info_types: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py new file mode 100644 index 00000000..dd85ecf8 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DlpServiceTransport +from .grpc import DlpServiceGrpcTransport +from .grpc_asyncio import DlpServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] +_transport_registry['grpc'] = DlpServiceGrpcTransport +_transport_registry['grpc_asyncio'] = DlpServiceGrpcAsyncIOTransport + +__all__ = ( + 'DlpServiceTransport', + 'DlpServiceGrpcTransport', + 'DlpServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py new file mode 100644 index 00000000..4abe2c3d --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py @@ -0,0 +1,771 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+import packaging.version
+import pkg_resources
+
+import google.auth  # type: ignore
+import google.api_core  # type: ignore
+from google.api_core import exceptions as core_exceptions  # type: ignore
+from google.api_core import gapic_v1  # type: ignore
+from google.api_core import retry as retries  # type: ignore
+from google.auth import credentials as ga_credentials  # type: ignore
+from google.oauth2 import service_account  # type: ignore
+
+from google.cloud.dlp_v2.types import dlp
+from google.protobuf import empty_pb2  # type: ignore
+
+try:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+        gapic_version=pkg_resources.get_distribution(
+            'google-cloud-dlp',
+        ).version,
+    )
+except pkg_resources.DistributionNotFound:
+    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+try:
+    # google.auth.__version__ was added in 1.26.0
+    _GOOGLE_AUTH_VERSION = google.auth.__version__
+except AttributeError:
+    try:  # try pkg_resources if it is available
+        _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
+    except pkg_resources.DistributionNotFound:  # pragma: NO COVER
+        _GOOGLE_AUTH_VERSION = None
+
+
+class DlpServiceTransport(abc.ABC):
+    """Abstract transport class for DlpService."""
+
+    AUTH_SCOPES = (
+        'https://www.googleapis.com/auth/cloud-platform',
+    )
+
+    DEFAULT_HOST: str = 'dlp.googleapis.com'
+
+    def __init__(
+            self, *,
+            host: str = DEFAULT_HOST,
+            credentials: ga_credentials.Credentials = None,
+            credentials_file: Optional[str] = None,
+            scopes: Optional[Sequence[str]] = None,
+            quota_project_id: Optional[str] = None,
+            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+            always_use_jwt_access: Optional[bool] = False,
+            **kwargs,
+            ) -> None:
+        """Instantiate the transport.
+
+        Args:
+            host (Optional[str]):
+                The hostname to connect to.
+            credentials (Optional[google.auth.credentials.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify the application to the service; if none
+                are specified, the client will attempt to ascertain the
+                credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): A list of scopes.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+        """
+        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+        if ':' not in host:
+            host += ':443'
+        self._host = host
+
+        scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
+
+        # Save the scopes.
+        self._scopes = scopes
+
+        # If no credentials are provided, then determine the appropriate
+        # defaults.
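+        # Resolution order, sketched below: an explicit ``credentials``
+        # object wins, then ``credentials_file``, then application default
+        # credentials. A hedged sketch of the three call paths, using the
+        # concrete gRPC transport from this package (``my-creds.json`` is a
+        # hypothetical filename):
+        #
+        #   DlpServiceGrpcTransport(credentials=explicit_credentials)
+        #   DlpServiceGrpcTransport(credentials_file="my-creds.json")
+        #   DlpServiceGrpcTransport()  # falls back to google.auth.default()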
+        if credentials and credentials_file:
+            raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive")
+
+        if credentials_file is not None:
+            credentials, _ = google.auth.load_credentials_from_file(
+                credentials_file,
+                **scopes_kwargs,
+                quota_project_id=quota_project_id,
+            )
+
+        elif credentials is None:
+            credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id)
+
+        # If the credentials are service account credentials, then always try to use self-signed JWT.
+        if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"):
+            credentials = credentials.with_always_use_jwt_access(True)
+
+        # Save the credentials.
+        self._credentials = credentials
+
+    # TODO(busunkim): This method is in the base transport
+    # to avoid duplicating code across the transport classes. It
+    # should be deleted once the minimum required version of google-auth is increased.
+
+    # TODO: Remove this function once google-auth >= 1.25.0 is required
+    @classmethod
+    def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]:
+        """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version."""
+
+        scopes_kwargs = {}
+
+        if _GOOGLE_AUTH_VERSION and (
+            packaging.version.parse(_GOOGLE_AUTH_VERSION)
+            >= packaging.version.parse("1.25.0")
+        ):
+            scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
+        else:
+            scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
+
+        return scopes_kwargs
+
+    def _prep_wrapped_messages(self, client_info):
+        # Precompute the wrapped methods.
+        self._wrapped_methods = {
+            self.inspect_content: gapic_v1.method.wrap_method(
+                self.inspect_content,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.redact_image: gapic_v1.method.wrap_method(
+                self.redact_image,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.deidentify_content: gapic_v1.method.wrap_method(
+                self.deidentify_content,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.reidentify_content: gapic_v1.method.wrap_method(
+                self.reidentify_content,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.list_info_types: gapic_v1.method.wrap_method(
+                self.list_info_types,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.create_inspect_template: gapic_v1.method.wrap_method(
+                self.create_inspect_template,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.update_inspect_template: gapic_v1.method.wrap_method(
+                self.update_inspect_template,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.get_inspect_template: gapic_v1.method.wrap_method(
+                self.get_inspect_template,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.list_inspect_templates: gapic_v1.method.wrap_method(
+                self.list_inspect_templates,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.delete_inspect_template: gapic_v1.method.wrap_method(
+                self.delete_inspect_template,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.create_deidentify_template: gapic_v1.method.wrap_method(
+                self.create_deidentify_template,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.update_deidentify_template: gapic_v1.method.wrap_method(
+                self.update_deidentify_template,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.get_deidentify_template: gapic_v1.method.wrap_method(
+                self.get_deidentify_template,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.list_deidentify_templates: gapic_v1.method.wrap_method(
+                self.list_deidentify_templates,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.delete_deidentify_template: gapic_v1.method.wrap_method(
+                self.delete_deidentify_template,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.create_job_trigger: gapic_v1.method.wrap_method(
+                self.create_job_trigger,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.update_job_trigger: gapic_v1.method.wrap_method(
+                self.update_job_trigger,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.hybrid_inspect_job_trigger: gapic_v1.method.wrap_method(
+                self.hybrid_inspect_job_trigger,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.get_job_trigger: gapic_v1.method.wrap_method(
+                self.get_job_trigger,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.list_job_triggers: gapic_v1.method.wrap_method(
+                self.list_job_triggers,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.delete_job_trigger: gapic_v1.method.wrap_method(
+                self.delete_job_trigger,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.activate_job_trigger: gapic_v1.method.wrap_method(
+                self.activate_job_trigger,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.create_dlp_job: gapic_v1.method.wrap_method(
+                self.create_dlp_job,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.list_dlp_jobs: gapic_v1.method.wrap_method(
+                self.list_dlp_jobs,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.get_dlp_job: gapic_v1.method.wrap_method(
+                self.get_dlp_job,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.delete_dlp_job: gapic_v1.method.wrap_method(
+                self.delete_dlp_job,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.cancel_dlp_job: gapic_v1.method.wrap_method(
+                self.cancel_dlp_job,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.create_stored_info_type: gapic_v1.method.wrap_method(
+                self.create_stored_info_type,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.update_stored_info_type: gapic_v1.method.wrap_method(
+                self.update_stored_info_type,
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.get_stored_info_type: gapic_v1.method.wrap_method(
+                self.get_stored_info_type,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.list_stored_info_types: gapic_v1.method.wrap_method(
+                self.list_stored_info_types,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.delete_stored_info_type: gapic_v1.method.wrap_method(
+                self.delete_stored_info_type,
+                default_retry=retries.Retry(
+                    initial=0.1,
+                    maximum=60.0,
+                    multiplier=1.3,
+                    predicate=retries.if_exception_type(
+                        core_exceptions.DeadlineExceeded,
+                        core_exceptions.ServiceUnavailable,
+                    ),
+                    deadline=300.0,
+                ),
+                default_timeout=300.0,
+                client_info=client_info,
+            ),
+            self.hybrid_inspect_dlp_job: 
gapic_v1.method.wrap_method( + self.hybrid_inspect_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + self.finish_dlp_job: gapic_v1.method.wrap_method( + self.finish_dlp_job, + default_timeout=300.0, + client_info=client_info, + ), + } + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + Union[ + dlp.InspectContentResponse, + Awaitable[dlp.InspectContentResponse] + ]]: + raise NotImplementedError() + + @property + def redact_image(self) -> Callable[ + [dlp.RedactImageRequest], + Union[ + dlp.RedactImageResponse, + Awaitable[dlp.RedactImageResponse] + ]]: + raise NotImplementedError() + + @property + def deidentify_content(self) -> Callable[ + [dlp.DeidentifyContentRequest], + Union[ + dlp.DeidentifyContentResponse, + Awaitable[dlp.DeidentifyContentResponse] + ]]: + raise NotImplementedError() + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + Union[ + dlp.ReidentifyContentResponse, + Awaitable[dlp.ReidentifyContentResponse] + ]]: + raise NotImplementedError() + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + Union[ + dlp.ListInfoTypesResponse, + Awaitable[dlp.ListInfoTypesResponse] + ]]: + raise NotImplementedError() + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + Union[ + dlp.InspectTemplate, + Awaitable[dlp.InspectTemplate] + ]]: + raise NotImplementedError() + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + Union[ + dlp.ListInspectTemplatesResponse, + Awaitable[dlp.ListInspectTemplatesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + Union[ + dlp.DeidentifyTemplate, + Awaitable[dlp.DeidentifyTemplate] + ]]: + raise NotImplementedError() + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + Union[ + dlp.ListDeidentifyTemplatesResponse, + Awaitable[dlp.ListDeidentifyTemplatesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + 
@property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + Union[ + dlp.HybridInspectResponse, + Awaitable[dlp.HybridInspectResponse] + ]]: + raise NotImplementedError() + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + Union[ + dlp.JobTrigger, + Awaitable[dlp.JobTrigger] + ]]: + raise NotImplementedError() + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + Union[ + dlp.ListJobTriggersResponse, + Awaitable[dlp.ListJobTriggersResponse] + ]]: + raise NotImplementedError() + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def create_dlp_job(self) -> Callable[ + [dlp.CreateDlpJobRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def list_dlp_jobs(self) -> Callable[ + [dlp.ListDlpJobsRequest], + Union[ + dlp.ListDlpJobsResponse, + Awaitable[dlp.ListDlpJobsResponse] + ]]: + raise NotImplementedError() + + @property + def get_dlp_job(self) -> Callable[ + [dlp.GetDlpJobRequest], + Union[ + dlp.DlpJob, + Awaitable[dlp.DlpJob] + ]]: + raise NotImplementedError() + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + Union[ + dlp.StoredInfoType, + Awaitable[dlp.StoredInfoType] + ]]: + raise NotImplementedError() + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + Union[ + dlp.ListStoredInfoTypesResponse, + Awaitable[dlp.ListStoredInfoTypesResponse] + ]]: + raise NotImplementedError() + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + Union[ + dlp.HybridInspectResponse, + Awaitable[dlp.HybridInspectResponse] + ]]: + raise NotImplementedError() + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'DlpServiceTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py 
b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py new file mode 100644 index 00000000..69020a3d --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py @@ -0,0 +1,1244 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.protobuf import empty_pb2 # type: ignore +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO + + +class DlpServiceGrpcTransport(DlpServiceTransport): + """gRPC backend transport for DlpService. + + The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in user- + supplied, unstructured data streams, like text blocks or images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'dlp.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. 
+                This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+                ignored if ``channel`` is provided.
+            channel (Optional[grpc.Channel]): A ``Channel`` instance through
+                which to make calls.
+            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+                If provided, it overrides the ``host`` argument and tries to create
+                a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                Deprecated. A callback to provide client SSL certificate bytes and
+                private key bytes, both in PEM format. It is ignored if
+                ``api_mtls_endpoint`` is None.
+            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+                for the grpc channel. It is ignored if ``channel`` is provided.
+            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+                A callback to provide client certificate bytes and private key bytes,
+                both in PEM format. It is used to configure a mutual TLS channel. It is
+                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+                The client info used to send a user-agent string along with
+                API requests. If ``None``, then default info will be used.
+                Generally, you only need to set this if you're developing
+                your own client library.
+            always_use_jwt_access (Optional[bool]): Whether self-signed JWT should
+                be used for service account credentials.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+                creation failed for any reason.
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+        self._grpc_channel = None
+        self._ssl_channel_credentials = ssl_channel_credentials
+        self._stubs: Dict[str, Callable] = {}
+
+        if api_mtls_endpoint:
+            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+        if client_cert_source:
+            warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+        if channel:
+            # Ignore credentials if a channel was passed.
+            credentials = False
+            # If a channel was explicitly provided, set it.
+            self._grpc_channel = channel
+            self._ssl_channel_credentials = None
+
+        else:
+            if api_mtls_endpoint:
+                host = api_mtls_endpoint
+
+                # Create SSL credentials with client_cert_source or application
+                # default SSL credentials.
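+                # A hedged sketch of such a callback (``read_bytes`` is a
+                # hypothetical helper returning file contents as bytes):
+                #
+                #   def client_cert_source():
+                #       return read_bytes('client.crt'), read_bytes('client.key')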
+                if client_cert_source:
+                    cert, key = client_cert_source()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+                else:
+                    self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+            else:
+                if client_cert_source_for_mtls and not ssl_channel_credentials:
+                    cert, key = client_cert_source_for_mtls()
+                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+                        certificate_chain=cert, private_key=key
+                    )
+
+        # The base transport sets the host, credentials, and scopes.
+        super().__init__(
+            host=host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            scopes=scopes,
+            quota_project_id=quota_project_id,
+            client_info=client_info,
+            always_use_jwt_access=always_use_jwt_access,
+        )
+
+        if not self._grpc_channel:
+            self._grpc_channel = type(self).create_channel(
+                self._host,
+                credentials=self._credentials,
+                credentials_file=credentials_file,
+                scopes=self._scopes,
+                ssl_credentials=self._ssl_channel_credentials,
+                quota_project_id=quota_project_id,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+
+        # Wrap messages. This must be done after self._grpc_channel exists.
+        self._prep_wrapped_messages(client_info)
+
+    @classmethod
+    def create_channel(cls,
+                       host: str = 'dlp.googleapis.com',
+                       credentials: ga_credentials.Credentials = None,
+                       credentials_file: str = None,
+                       scopes: Optional[Sequence[str]] = None,
+                       quota_project_id: Optional[str] = None,
+                       **kwargs) -> grpc.Channel:
+        """Create and return a gRPC channel object.
+
+        Args:
+            host (Optional[str]): The host for the channel to use.
+            credentials (Optional[~.Credentials]): The
+                authorization credentials to attach to requests. These
+                credentials identify this application to the service. If
+                none are specified, the client will attempt to ascertain
+                the credentials from the environment.
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service."""
+        return self._grpc_channel
+
+    @property
+    def inspect_content(self) -> Callable[
+            [dlp.InspectContentRequest],
+            dlp.InspectContentResponse]:
+        r"""Return a callable for the inspect content method over gRPC.
+
+        Finds potentially sensitive info in content.
+        This method has limits on input size, processing time,
+        and output size.
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+        For how-to guides, see
+        https://cloud.google.com/dlp/docs/inspecting-images and
+        https://cloud.google.com/dlp/docs/inspecting-text.
+
+        Returns:
+            Callable[[~.InspectContentRequest],
+                    ~.InspectContentResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'inspect_content' not in self._stubs:
+            self._stubs['inspect_content'] = self.grpc_channel.unary_unary(
+                '/google.privacy.dlp.v2.DlpService/InspectContent',
+                request_serializer=dlp.InspectContentRequest.serialize,
+                response_deserializer=dlp.InspectContentResponse.deserialize,
+            )
+        return self._stubs['inspect_content']
+
+    @property
+    def redact_image(self) -> Callable[
+            [dlp.RedactImageRequest],
+            dlp.RedactImageResponse]:
+        r"""Return a callable for the redact image method over gRPC.
+
+        Redacts potentially sensitive info from an image.
+        This method has limits on input size, processing time,
+        and output size. See
+        https://cloud.google.com/dlp/docs/redacting-sensitive-
+        data-images to learn more.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Returns:
+            Callable[[~.RedactImageRequest],
+                    ~.RedactImageResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+        if 'redact_image' not in self._stubs:
+            self._stubs['redact_image'] = self.grpc_channel.unary_unary(
+                '/google.privacy.dlp.v2.DlpService/RedactImage',
+                request_serializer=dlp.RedactImageRequest.serialize,
+                response_deserializer=dlp.RedactImageResponse.deserialize,
+            )
+        return self._stubs['redact_image']
+
+    @property
+    def deidentify_content(self) -> Callable[
+            [dlp.DeidentifyContentRequest],
+            dlp.DeidentifyContentResponse]:
+        r"""Return a callable for the deidentify content method over gRPC.
+
+        De-identifies potentially sensitive info from a
+        ContentItem. This method has limits on input size and
+        output size. See
+        https://cloud.google.com/dlp/docs/deidentify-sensitive-
+        data to learn more.
+
+        When no InfoTypes or CustomInfoTypes are specified in
+        this request, the system will automatically choose what
+        detectors to run. By default this may be all types, but
+        may change over time as detectors are updated.
+
+        Returns:
+            Callable[[~.DeidentifyContentRequest],
+                    ~.DeidentifyContentResponse]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
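+        # A hedged sketch of invoking the returned callable directly
+        # (request fields elided; most callers go through the client class):
+        #
+        #   rpc = transport.deidentify_content
+        #   response = rpc(dlp.DeidentifyContentRequest(...))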
+ if 'deidentify_content' not in self._stubs: + self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeidentifyContent', + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs['deidentify_content'] + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + dlp.ReidentifyContentResponse]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + ~.ReidentifyContentResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reidentify_content' not in self._stubs: + self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ReidentifyContent', + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs['reidentify_content'] + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + dlp.ListInfoTypesResponse]: + r"""Return a callable for the list info types method over gRPC. + + Returns a list of the sensitive information types + that the DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + ~.ListInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_info_types' not in self._stubs: + self._stubs['list_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInfoTypes', + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs['list_info_types'] + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for re-using frequently + used configuration for inspecting content, images, and + storage. See https://cloud.google.com/dlp/docs/creating- + templates to learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
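+        # A hedged usage sketch (``parent`` is a hypothetical resource name
+        # such as ``"projects/my-project"``):
+        #
+        #   request = dlp.CreateInspectTemplateRequest(parent=parent)
+        #   template = transport.create_inspect_template(request)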
+ if 'create_inspect_template' not in self._stubs: + self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['create_inspect_template'] + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.UpdateInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_inspect_template' not in self._stubs: + self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['update_inspect_template'] + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + dlp.InspectTemplate]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + ~.InspectTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_inspect_template' not in self._stubs: + self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['get_inspect_template'] + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + dlp.ListInspectTemplatesResponse]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.ListInspectTemplatesRequest], + ~.ListInspectTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
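+        # Each call returns a single page; applications usually iterate all
+        # pages through the pager classes added in this package's
+        # ``pagers.py``. A hedged sketch (``client`` is a hypothetical
+        # DlpServiceClient instance):
+        #
+        #   for template in client.list_inspect_templates(parent=parent):
+        #       print(template.name)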
+ if 'list_inspect_templates' not in self._stubs: + self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs['list_inspect_templates'] + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.DeleteInspectTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_inspect_template' not in self._stubs: + self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_inspect_template'] + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for re-using frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates- + deid to learn more. + + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_deidentify_template' not in self._stubs: + self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['create_deidentify_template'] + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
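+        # A hedged sketch (``template_name`` is a hypothetical resource
+        # name; the request can also carry the updated template and a
+        # field mask):
+        #
+        #   request = dlp.UpdateDeidentifyTemplateRequest(name=template_name)
+        #   template = transport.update_deidentify_template(request)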
+ if 'update_deidentify_template' not in self._stubs: + self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['update_deidentify_template'] + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + dlp.DeidentifyTemplate]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + ~.DeidentifyTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_deidentify_template' not in self._stubs: + self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['get_deidentify_template'] + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + dlp.ListDeidentifyTemplatesResponse]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + ~.ListDeidentifyTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_deidentify_templates' not in self._stubs: + self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs['list_deidentify_templates'] + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
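+        # Note the deserializer below: delete RPCs on this service resolve
+        # to ``google.protobuf.Empty``. A hedged sketch (``template_name``
+        # is a hypothetical resource name):
+        #
+        #   transport.delete_deidentify_template(
+        #       dlp.DeleteDeidentifyTemplateRequest(name=template_name))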
+ if 'delete_deidentify_template' not in self._stubs: + self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', + request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_deidentify_template'] + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the create job trigger method over gRPC. + + Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.CreateJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job_trigger' not in self._stubs: + self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', + request_serializer=dlp.CreateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['create_job_trigger'] + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the update job trigger method over gRPC. + + Updates a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.UpdateJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_job_trigger' not in self._stubs: + self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', + request_serializer=dlp.UpdateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['update_job_trigger'] + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + dlp.HybridInspectResponse]: + r"""Return a callable for the hybrid inspect job trigger method over gRPC. + + Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Returns: + Callable[[~.HybridInspectJobTriggerRequest], + ~.HybridInspectResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
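+        # A hedged sketch of submitting one hybrid item to a trigger
+        # (``trigger_name`` is a hypothetical resource name; the request
+        # can also carry a ``hybrid_item`` payload):
+        #
+        #   request = dlp.HybridInspectJobTriggerRequest(name=trigger_name)
+        #   transport.hybrid_inspect_job_trigger(request)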
+ if 'hybrid_inspect_job_trigger' not in self._stubs: + self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', + request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_job_trigger'] + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + dlp.JobTrigger]: + r"""Return a callable for the get job trigger method over gRPC. + + Gets a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.GetJobTriggerRequest], + ~.JobTrigger]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job_trigger' not in self._stubs: + self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetJobTrigger', + request_serializer=dlp.GetJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['get_job_trigger'] + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + dlp.ListJobTriggersResponse]: + r"""Return a callable for the list job triggers method over gRPC. + + Lists job triggers. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.ListJobTriggersRequest], + ~.ListJobTriggersResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_triggers' not in self._stubs: + self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListJobTriggers', + request_serializer=dlp.ListJobTriggersRequest.serialize, + response_deserializer=dlp.ListJobTriggersResponse.deserialize, + ) + return self._stubs['list_job_triggers'] + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete job trigger method over gRPC. + + Deletes a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.DeleteJobTriggerRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_trigger' not in self._stubs: + self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', + request_serializer=dlp.DeleteJobTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_trigger'] + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + dlp.DlpJob]: + r"""Return a callable for the activate job trigger method over gRPC. + + Activate a job trigger. 
Causes the immediate execution
+ of a trigger instead of waiting on the trigger event to
+ occur.
+
+ Returns:
+ Callable[[~.ActivateJobTriggerRequest],
+ ~.DlpJob]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'activate_job_trigger' not in self._stubs:
+ self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary(
+ '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger',
+ request_serializer=dlp.ActivateJobTriggerRequest.serialize,
+ response_deserializer=dlp.DlpJob.deserialize,
+ )
+ return self._stubs['activate_job_trigger']
+
+ @property
+ def create_dlp_job(self) -> Callable[
+ [dlp.CreateDlpJobRequest],
+ dlp.DlpJob]:
+ r"""Return a callable for the create dlp job method over gRPC.
+
+ Creates a new job to inspect storage or calculate
+ risk metrics. See
+ https://cloud.google.com/dlp/docs/inspecting-storage and
+ https://cloud.google.com/dlp/docs/compute-risk-analysis
+ to learn more.
+ When no InfoTypes or CustomInfoTypes are specified in
+ inspect jobs, the system will automatically choose what
+ detectors to run. By default this may be all types, but
+ may change over time as detectors are updated.
+
+ Returns:
+ Callable[[~.CreateDlpJobRequest],
+ ~.DlpJob]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'create_dlp_job' not in self._stubs:
+ self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary(
+ '/google.privacy.dlp.v2.DlpService/CreateDlpJob',
+ request_serializer=dlp.CreateDlpJobRequest.serialize,
+ response_deserializer=dlp.DlpJob.deserialize,
+ )
+ return self._stubs['create_dlp_job']
+
+ @property
+ def list_dlp_jobs(self) -> Callable[
+ [dlp.ListDlpJobsRequest],
+ dlp.ListDlpJobsResponse]:
+ r"""Return a callable for the list dlp jobs method over gRPC.
+
+ Lists DlpJobs that match the specified filter in the
+ request. See
+ https://cloud.google.com/dlp/docs/inspecting-storage and
+ https://cloud.google.com/dlp/docs/compute-risk-analysis
+ to learn more.
+
+ Returns:
+ Callable[[~.ListDlpJobsRequest],
+ ~.ListDlpJobsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'list_dlp_jobs' not in self._stubs:
+ self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary(
+ '/google.privacy.dlp.v2.DlpService/ListDlpJobs',
+ request_serializer=dlp.ListDlpJobsRequest.serialize,
+ response_deserializer=dlp.ListDlpJobsResponse.deserialize,
+ )
+ return self._stubs['list_dlp_jobs']
+
+ @property
+ def get_dlp_job(self) -> Callable[
+ [dlp.GetDlpJobRequest],
+ dlp.DlpJob]:
+ r"""Return a callable for the get dlp job method over gRPC.
+
+ Gets the latest state of a long-running DlpJob.
+ See https://cloud.google.com/dlp/docs/inspecting-storage
+ and https://cloud.google.com/dlp/docs/compute-risk-
+ analysis to learn more.
+
+ Returns:
+ Callable[[~.GetDlpJobRequest],
+ ~.DlpJob]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_dlp_job' not in self._stubs: + self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDlpJob', + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['get_dlp_job'] + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete dlp job method over gRPC. + + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be cancelled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_dlp_job' not in self._stubs: + self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_dlp_job'] + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Returns: + Callable[[~.CancelDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_dlp_job' not in self._stubs: + self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CancelDlpJob', + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_dlp_job'] + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
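Together, ``create_dlp_job``, ``get_dlp_job``, ``cancel_dlp_job`` and ``delete_dlp_job`` span the job lifecycle described in these docstrings. A hedged polling sketch against the high-level client; the project, bucket and 30-second interval are placeholders:

.. code-block:: python

    import time

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()

    job = client.create_dlp_job(
        request={
            "parent": "projects/my-project",  # placeholder project
            "inspect_job": {
                "storage_config": {
                    "cloud_storage_options": {
                        "file_set": {"url": "gs://my-bucket/**"}  # placeholder bucket
                    }
                }
            },
        }
    )

    # Poll the job state; request best-effort cancellation after ~10 minutes.
    for _ in range(20):
        job = client.get_dlp_job(request={"name": job.name})
        if job.state == dlp_v2.DlpJob.JobState.DONE:
            break
        time.sleep(30)
    else:
        client.cancel_dlp_job(request={"name": job.name})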
+ if 'create_stored_info_type' not in self._stubs: + self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['create_stored_info_type'] + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the update stored info type method over gRPC. + + Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_stored_info_type' not in self._stubs: + self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['update_stored_info_type'] + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + dlp.StoredInfoType]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.GetStoredInfoTypeRequest], + ~.StoredInfoType]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_stored_info_type' not in self._stubs: + self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['get_stored_info_type'] + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + dlp.ListStoredInfoTypesResponse]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.ListStoredInfoTypesRequest], + ~.ListStoredInfoTypesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
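As the docstrings note, a stored infoType is built once, for example from a large custom dictionary in Cloud Storage, and versioned on update rather than mutated in place. A sketch of creation with placeholder resource names:

.. code-block:: python

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()

    info_type = client.create_stored_info_type(
        request={
            "parent": "projects/my-project",  # placeholder project
            "stored_info_type_id": "employee-ids",  # placeholder id
            "config": {
                "display_name": "Employee IDs",
                "large_custom_dictionary": {
                    # Source terms, one per line, and an output folder for the
                    # processed dictionary; both paths are placeholders.
                    "cloud_storage_file_set": {"url": "gs://my-bucket/terms.txt"},
                    "output": {"path": "gs://my-bucket/dictionary/"},
                },
            },
        }
    )
    print(info_type.name)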
+ if 'list_stored_info_types' not in self._stubs: + self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs['list_stored_info_types'] + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_stored_info_type' not in self._stubs: + self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_stored_info_type'] + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + dlp.HybridInspectResponse]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. + + Inspect hybrid content and store findings to a job. + To review the findings inspect the job. Inspection will + occur asynchronously. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + ~.HybridInspectResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_dlp_job' not in self._stubs: + self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_dlp_job'] + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. Early access feature is in a pre- + release state and might change or have limited support. + For more information, see + https://cloud.google.com/products#product-launch-stages. + + Returns: + Callable[[~.FinishDlpJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
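Every property in this transport repeats the create-once-and-cache shape flagged in the comments. Stripped of the DLP specifics, it is lazy memoization of channel callables; the class below is an illustrative reduction, not library code:

.. code-block:: python

    from typing import Callable, Dict


    class LazyStubTransport:
        """Illustrative reduction of the stub-caching pattern above."""

        def __init__(self, channel):
            self._channel = channel
            self._stubs: Dict[str, Callable] = {}

        @property
        def finish_dlp_job(self) -> Callable:
            # Register the stub on first access and reuse it afterwards,
            # so RPCs that are never called never touch the channel.
            if "finish_dlp_job" not in self._stubs:
                self._stubs["finish_dlp_job"] = self._channel.unary_unary(
                    "/google.privacy.dlp.v2.DlpService/FinishDlpJob"
                )
            return self._stubs["finish_dlp_job"]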
+ if 'finish_dlp_job' not in self._stubs: + self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/FinishDlpJob', + request_serializer=dlp.FinishDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['finish_dlp_job'] + + +__all__ = ( + 'DlpServiceGrpcTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py new file mode 100644 index 00000000..1854e30a --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py @@ -0,0 +1,1248 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.dlp_v2.types import dlp +from google.protobuf import empty_pb2 # type: ignore +from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import DlpServiceGrpcTransport + + +class DlpServiceGrpcAsyncIOTransport(DlpServiceTransport): + """gRPC AsyncIO backend transport for DlpService. + + The Cloud Data Loss Prevention (DLP) API is a service that + allows clients to detect the presence of Personally Identifiable + Information (PII) and other privacy-sensitive data in user- + supplied, unstructured data streams, like text blocks or images. + The service also includes methods for sensitive data redaction + and scheduling of data scans on Google Cloud Platform based data + sets. + To learn more about concepts and find how-to guides see + https://cloud.google.com/dlp/docs/. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'dlp.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. 
If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs
+ )
+
+ def __init__(self, *,
+ host: str = 'dlp.googleapis.com',
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def inspect_content(self) -> Callable[ + [dlp.InspectContentRequest], + Awaitable[dlp.InspectContentResponse]]: + r"""Return a callable for the inspect content method over gRPC. + + Finds potentially sensitive info in content. + This method has limits on input size, processing time, + and output size. + When no InfoTypes or CustomInfoTypes are specified in + this request, the system will automatically choose what + detectors to run. By default this may be all types, but + may change over time as detectors are updated. 
+ For how-to guides, see
+ https://cloud.google.com/dlp/docs/inspecting-images and
+ https://cloud.google.com/dlp/docs/inspecting-text.
+
+ Returns:
+ Callable[[~.InspectContentRequest],
+ Awaitable[~.InspectContentResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'inspect_content' not in self._stubs:
+ self._stubs['inspect_content'] = self.grpc_channel.unary_unary(
+ '/google.privacy.dlp.v2.DlpService/InspectContent',
+ request_serializer=dlp.InspectContentRequest.serialize,
+ response_deserializer=dlp.InspectContentResponse.deserialize,
+ )
+ return self._stubs['inspect_content']
+
+ @property
+ def redact_image(self) -> Callable[
+ [dlp.RedactImageRequest],
+ Awaitable[dlp.RedactImageResponse]]:
+ r"""Return a callable for the redact image method over gRPC.
+
+ Redacts potentially sensitive info from an image.
+ This method has limits on input size, processing time,
+ and output size. See
+ https://cloud.google.com/dlp/docs/redacting-sensitive-
+ data-images to learn more.
+
+ When no InfoTypes or CustomInfoTypes are specified in
+ this request, the system will automatically choose what
+ detectors to run. By default this may be all types, but
+ may change over time as detectors are updated.
+
+ Returns:
+ Callable[[~.RedactImageRequest],
+ Awaitable[~.RedactImageResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'redact_image' not in self._stubs:
+ self._stubs['redact_image'] = self.grpc_channel.unary_unary(
+ '/google.privacy.dlp.v2.DlpService/RedactImage',
+ request_serializer=dlp.RedactImageRequest.serialize,
+ response_deserializer=dlp.RedactImageResponse.deserialize,
+ )
+ return self._stubs['redact_image']
+
+ @property
+ def deidentify_content(self) -> Callable[
+ [dlp.DeidentifyContentRequest],
+ Awaitable[dlp.DeidentifyContentResponse]]:
+ r"""Return a callable for the deidentify content method over gRPC.
+
+ De-identifies potentially sensitive info from a
+ ContentItem. This method has limits on input size and
+ output size. See
+ https://cloud.google.com/dlp/docs/deidentify-sensitive-
+ data to learn more.
+
+ When no InfoTypes or CustomInfoTypes are specified in
+ this request, the system will automatically choose what
+ detectors to run. By default this may be all types, but
+ may change over time as detectors are updated.
+
+ Returns:
+ Callable[[~.DeidentifyContentRequest],
+ Awaitable[~.DeidentifyContentResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'deidentify_content' not in self._stubs: + self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeidentifyContent', + request_serializer=dlp.DeidentifyContentRequest.serialize, + response_deserializer=dlp.DeidentifyContentResponse.deserialize, + ) + return self._stubs['deidentify_content'] + + @property + def reidentify_content(self) -> Callable[ + [dlp.ReidentifyContentRequest], + Awaitable[dlp.ReidentifyContentResponse]]: + r"""Return a callable for the reidentify content method over gRPC. + + Re-identifies content that has been de-identified. See + https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example + to learn more. + + Returns: + Callable[[~.ReidentifyContentRequest], + Awaitable[~.ReidentifyContentResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'reidentify_content' not in self._stubs: + self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ReidentifyContent', + request_serializer=dlp.ReidentifyContentRequest.serialize, + response_deserializer=dlp.ReidentifyContentResponse.deserialize, + ) + return self._stubs['reidentify_content'] + + @property + def list_info_types(self) -> Callable[ + [dlp.ListInfoTypesRequest], + Awaitable[dlp.ListInfoTypesResponse]]: + r"""Return a callable for the list info types method over gRPC. + + Returns a list of the sensitive information types + that the DLP API supports. See + https://cloud.google.com/dlp/docs/infotypes-reference to + learn more. + + Returns: + Callable[[~.ListInfoTypesRequest], + Awaitable[~.ListInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_info_types' not in self._stubs: + self._stubs['list_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInfoTypes', + request_serializer=dlp.ListInfoTypesRequest.serialize, + response_deserializer=dlp.ListInfoTypesResponse.deserialize, + ) + return self._stubs['list_info_types'] + + @property + def create_inspect_template(self) -> Callable[ + [dlp.CreateInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the create inspect template method over gRPC. + + Creates an InspectTemplate for re-using frequently + used configuration for inspecting content, images, and + storage. See https://cloud.google.com/dlp/docs/creating- + templates to learn more. + + Returns: + Callable[[~.CreateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
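On this transport each callable returns an awaitable, so the content RPCs compose naturally with ``asyncio``. A hedged sketch through the public async client, which wraps these stubs; the project ID and sample text are placeholders:

.. code-block:: python

    import asyncio

    from google.cloud import dlp_v2


    async def main() -> None:
        client = dlp_v2.DlpServiceAsyncClient()
        response = await client.deidentify_content(
            request={
                "parent": "projects/my-project",  # placeholder project
                "item": {"value": "My phone number is (415) 555-0100."},
                "inspect_config": {"info_types": [{"name": "PHONE_NUMBER"}]},
                "deidentify_config": {
                    "info_type_transformations": {
                        "transformations": [
                            # Replace each finding with its infoType name.
                            {"primitive_transformation": {"replace_with_info_type_config": {}}}
                        ]
                    }
                },
            }
        )
        print(response.item.value)  # e.g. "My phone number is [PHONE_NUMBER]."


    asyncio.run(main())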
+ if 'create_inspect_template' not in self._stubs: + self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', + request_serializer=dlp.CreateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['create_inspect_template'] + + @property + def update_inspect_template(self) -> Callable[ + [dlp.UpdateInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the update inspect template method over gRPC. + + Updates the InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.UpdateInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_inspect_template' not in self._stubs: + self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', + request_serializer=dlp.UpdateInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['update_inspect_template'] + + @property + def get_inspect_template(self) -> Callable[ + [dlp.GetInspectTemplateRequest], + Awaitable[dlp.InspectTemplate]]: + r"""Return a callable for the get inspect template method over gRPC. + + Gets an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.GetInspectTemplateRequest], + Awaitable[~.InspectTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_inspect_template' not in self._stubs: + self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', + request_serializer=dlp.GetInspectTemplateRequest.serialize, + response_deserializer=dlp.InspectTemplate.deserialize, + ) + return self._stubs['get_inspect_template'] + + @property + def list_inspect_templates(self) -> Callable[ + [dlp.ListInspectTemplatesRequest], + Awaitable[dlp.ListInspectTemplatesResponse]]: + r"""Return a callable for the list inspect templates method over gRPC. + + Lists InspectTemplates. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.ListInspectTemplatesRequest], + Awaitable[~.ListInspectTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
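Each list RPC here returns a single response page; the async client wraps these callables in pagers so callers can iterate across pages transparently. A hedged sketch with a placeholder project:

.. code-block:: python

    import asyncio

    from google.cloud import dlp_v2


    async def main() -> None:
        client = dlp_v2.DlpServiceAsyncClient()
        pager = await client.list_inspect_templates(
            request={"parent": "projects/my-project"}  # placeholder project
        )
        # The pager fetches ListInspectTemplatesResponse pages lazily and
        # yields individual InspectTemplate messages.
        async for template in pager:
            print(template.name)


    asyncio.run(main())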
+ if 'list_inspect_templates' not in self._stubs: + self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', + request_serializer=dlp.ListInspectTemplatesRequest.serialize, + response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, + ) + return self._stubs['list_inspect_templates'] + + @property + def delete_inspect_template(self) -> Callable[ + [dlp.DeleteInspectTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete inspect template method over gRPC. + + Deletes an InspectTemplate. + See https://cloud.google.com/dlp/docs/creating-templates + to learn more. + + Returns: + Callable[[~.DeleteInspectTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_inspect_template' not in self._stubs: + self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', + request_serializer=dlp.DeleteInspectTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_inspect_template'] + + @property + def create_deidentify_template(self) -> Callable[ + [dlp.CreateDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the create deidentify template method over gRPC. + + Creates a DeidentifyTemplate for re-using frequently + used configuration for de-identifying content, images, + and storage. See + https://cloud.google.com/dlp/docs/creating-templates- + deid to learn more. + + Returns: + Callable[[~.CreateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_deidentify_template' not in self._stubs: + self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', + request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['create_deidentify_template'] + + @property + def update_deidentify_template(self) -> Callable[ + [dlp.UpdateDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the update deidentify template method over gRPC. + + Updates the DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.UpdateDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'update_deidentify_template' not in self._stubs: + self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', + request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['update_deidentify_template'] + + @property + def get_deidentify_template(self) -> Callable[ + [dlp.GetDeidentifyTemplateRequest], + Awaitable[dlp.DeidentifyTemplate]]: + r"""Return a callable for the get deidentify template method over gRPC. + + Gets a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.GetDeidentifyTemplateRequest], + Awaitable[~.DeidentifyTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_deidentify_template' not in self._stubs: + self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', + request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, + response_deserializer=dlp.DeidentifyTemplate.deserialize, + ) + return self._stubs['get_deidentify_template'] + + @property + def list_deidentify_templates(self) -> Callable[ + [dlp.ListDeidentifyTemplatesRequest], + Awaitable[dlp.ListDeidentifyTemplatesResponse]]: + r"""Return a callable for the list deidentify templates method over gRPC. + + Lists DeidentifyTemplates. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.ListDeidentifyTemplatesRequest], + Awaitable[~.ListDeidentifyTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_deidentify_templates' not in self._stubs: + self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', + request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, + response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, + ) + return self._stubs['list_deidentify_templates'] + + @property + def delete_deidentify_template(self) -> Callable[ + [dlp.DeleteDeidentifyTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete deidentify template method over gRPC. + + Deletes a DeidentifyTemplate. + See https://cloud.google.com/dlp/docs/creating- + templates-deid to learn more. + + Returns: + Callable[[~.DeleteDeidentifyTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
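The point of the template RPCs above is reuse: a DeidentifyTemplate created once can be referenced by resource name in later ``deidentify_content`` calls. A sketch under the same placeholder-project assumption, shown with the sync client for brevity:

.. code-block:: python

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()

    # Store the de-identification recipe once...
    template = client.create_deidentify_template(
        request={
            "parent": "projects/my-project",  # placeholder project
            "deidentify_template": {
                "deidentify_config": {
                    "info_type_transformations": {
                        "transformations": [
                            {"primitive_transformation": {"redact_config": {}}}
                        ]
                    }
                }
            },
        }
    )

    # ...then reference it by name instead of repeating the config.
    response = client.deidentify_content(
        request={
            "parent": "projects/my-project",
            "deidentify_template_name": template.name,
            "item": {"value": "email: alice@example.com"},  # placeholder text
        }
    )
    print(response.item.value)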
+ if 'delete_deidentify_template' not in self._stubs: + self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', + request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_deidentify_template'] + + @property + def create_job_trigger(self) -> Callable[ + [dlp.CreateJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the create job trigger method over gRPC. + + Creates a job trigger to run DLP actions such as + scanning storage for sensitive information on a set + schedule. See + https://cloud.google.com/dlp/docs/creating-job-triggers + to learn more. + + Returns: + Callable[[~.CreateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job_trigger' not in self._stubs: + self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', + request_serializer=dlp.CreateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['create_job_trigger'] + + @property + def update_job_trigger(self) -> Callable[ + [dlp.UpdateJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the update job trigger method over gRPC. + + Updates a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.UpdateJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_job_trigger' not in self._stubs: + self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', + request_serializer=dlp.UpdateJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['update_job_trigger'] + + @property + def hybrid_inspect_job_trigger(self) -> Callable[ + [dlp.HybridInspectJobTriggerRequest], + Awaitable[dlp.HybridInspectResponse]]: + r"""Return a callable for the hybrid inspect job trigger method over gRPC. + + Inspect hybrid content and store findings to a + trigger. The inspection will be processed + asynchronously. To review the findings monitor the jobs + within the trigger. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Returns: + Callable[[~.HybridInspectJobTriggerRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'hybrid_inspect_job_trigger' not in self._stubs: + self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', + request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_job_trigger'] + + @property + def get_job_trigger(self) -> Callable[ + [dlp.GetJobTriggerRequest], + Awaitable[dlp.JobTrigger]]: + r"""Return a callable for the get job trigger method over gRPC. + + Gets a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.GetJobTriggerRequest], + Awaitable[~.JobTrigger]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job_trigger' not in self._stubs: + self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetJobTrigger', + request_serializer=dlp.GetJobTriggerRequest.serialize, + response_deserializer=dlp.JobTrigger.deserialize, + ) + return self._stubs['get_job_trigger'] + + @property + def list_job_triggers(self) -> Callable[ + [dlp.ListJobTriggersRequest], + Awaitable[dlp.ListJobTriggersResponse]]: + r"""Return a callable for the list job triggers method over gRPC. + + Lists job triggers. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.ListJobTriggersRequest], + Awaitable[~.ListJobTriggersResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_triggers' not in self._stubs: + self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListJobTriggers', + request_serializer=dlp.ListJobTriggersRequest.serialize, + response_deserializer=dlp.ListJobTriggersResponse.deserialize, + ) + return self._stubs['list_job_triggers'] + + @property + def delete_job_trigger(self) -> Callable[ + [dlp.DeleteJobTriggerRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job trigger method over gRPC. + + Deletes a job trigger. + See https://cloud.google.com/dlp/docs/creating-job- + triggers to learn more. + + Returns: + Callable[[~.DeleteJobTriggerRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_trigger' not in self._stubs: + self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', + request_serializer=dlp.DeleteJobTriggerRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_trigger'] + + @property + def activate_job_trigger(self) -> Callable[ + [dlp.ActivateJobTriggerRequest], + Awaitable[dlp.DlpJob]]: + r"""Return a callable for the activate job trigger method over gRPC. + + Activate a job trigger. 
Causes the immediate execution
+ of a trigger instead of waiting on the trigger event to
+ occur.
+
+ Returns:
+ Callable[[~.ActivateJobTriggerRequest],
+ Awaitable[~.DlpJob]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'activate_job_trigger' not in self._stubs:
+ self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary(
+ '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger',
+ request_serializer=dlp.ActivateJobTriggerRequest.serialize,
+ response_deserializer=dlp.DlpJob.deserialize,
+ )
+ return self._stubs['activate_job_trigger']
+
+ @property
+ def create_dlp_job(self) -> Callable[
+ [dlp.CreateDlpJobRequest],
+ Awaitable[dlp.DlpJob]]:
+ r"""Return a callable for the create dlp job method over gRPC.
+
+ Creates a new job to inspect storage or calculate
+ risk metrics. See
+ https://cloud.google.com/dlp/docs/inspecting-storage and
+ https://cloud.google.com/dlp/docs/compute-risk-analysis
+ to learn more.
+ When no InfoTypes or CustomInfoTypes are specified in
+ inspect jobs, the system will automatically choose what
+ detectors to run. By default this may be all types, but
+ may change over time as detectors are updated.
+
+ Returns:
+ Callable[[~.CreateDlpJobRequest],
+ Awaitable[~.DlpJob]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'create_dlp_job' not in self._stubs:
+ self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary(
+ '/google.privacy.dlp.v2.DlpService/CreateDlpJob',
+ request_serializer=dlp.CreateDlpJobRequest.serialize,
+ response_deserializer=dlp.DlpJob.deserialize,
+ )
+ return self._stubs['create_dlp_job']
+
+ @property
+ def list_dlp_jobs(self) -> Callable[
+ [dlp.ListDlpJobsRequest],
+ Awaitable[dlp.ListDlpJobsResponse]]:
+ r"""Return a callable for the list dlp jobs method over gRPC.
+
+ Lists DlpJobs that match the specified filter in the
+ request. See
+ https://cloud.google.com/dlp/docs/inspecting-storage and
+ https://cloud.google.com/dlp/docs/compute-risk-analysis
+ to learn more.
+
+ Returns:
+ Callable[[~.ListDlpJobsRequest],
+ Awaitable[~.ListDlpJobsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'list_dlp_jobs' not in self._stubs:
+ self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary(
+ '/google.privacy.dlp.v2.DlpService/ListDlpJobs',
+ request_serializer=dlp.ListDlpJobsRequest.serialize,
+ response_deserializer=dlp.ListDlpJobsResponse.deserialize,
+ )
+ return self._stubs['list_dlp_jobs']
+
+ @property
+ def get_dlp_job(self) -> Callable[
+ [dlp.GetDlpJobRequest],
+ Awaitable[dlp.DlpJob]]:
+ r"""Return a callable for the get dlp job method over gRPC.
+
+ Gets the latest state of a long-running DlpJob.
+ See https://cloud.google.com/dlp/docs/inspecting-storage
+ and https://cloud.google.com/dlp/docs/compute-risk-
+ analysis to learn more.
+ + Returns: + Callable[[~.GetDlpJobRequest], + Awaitable[~.DlpJob]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_dlp_job' not in self._stubs: + self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetDlpJob', + request_serializer=dlp.GetDlpJobRequest.serialize, + response_deserializer=dlp.DlpJob.deserialize, + ) + return self._stubs['get_dlp_job'] + + @property + def delete_dlp_job(self) -> Callable[ + [dlp.DeleteDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete dlp job method over gRPC. + + Deletes a long-running DlpJob. This method indicates + that the client is no longer interested in the DlpJob + result. The job will be cancelled if possible. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Returns: + Callable[[~.DeleteDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_dlp_job' not in self._stubs: + self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', + request_serializer=dlp.DeleteDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_dlp_job'] + + @property + def cancel_dlp_job(self) -> Callable[ + [dlp.CancelDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the cancel dlp job method over gRPC. + + Starts asynchronous cancellation on a long-running + DlpJob. The server makes a best effort to cancel the + DlpJob, but success is not guaranteed. + See https://cloud.google.com/dlp/docs/inspecting-storage + and https://cloud.google.com/dlp/docs/compute-risk- + analysis to learn more. + + Returns: + Callable[[~.CancelDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'cancel_dlp_job' not in self._stubs: + self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CancelDlpJob', + request_serializer=dlp.CancelDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['cancel_dlp_job'] + + @property + def create_stored_info_type(self) -> Callable[ + [dlp.CreateStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the create stored info type method over gRPC. + + Creates a pre-built stored infoType to be used for + inspection. See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.CreateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_stored_info_type' not in self._stubs: + self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', + request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['create_stored_info_type'] + + @property + def update_stored_info_type(self) -> Callable[ + [dlp.UpdateStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the update stored info type method over gRPC. + + Updates the stored infoType by creating a new + version. The existing version will continue to be used + until the new version is ready. See + https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.UpdateStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'update_stored_info_type' not in self._stubs: + self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', + request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['update_stored_info_type'] + + @property + def get_stored_info_type(self) -> Callable[ + [dlp.GetStoredInfoTypeRequest], + Awaitable[dlp.StoredInfoType]]: + r"""Return a callable for the get stored info type method over gRPC. + + Gets a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.GetStoredInfoTypeRequest], + Awaitable[~.StoredInfoType]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_stored_info_type' not in self._stubs: + self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', + request_serializer=dlp.GetStoredInfoTypeRequest.serialize, + response_deserializer=dlp.StoredInfoType.deserialize, + ) + return self._stubs['get_stored_info_type'] + + @property + def list_stored_info_types(self) -> Callable[ + [dlp.ListStoredInfoTypesRequest], + Awaitable[dlp.ListStoredInfoTypesResponse]]: + r"""Return a callable for the list stored info types method over gRPC. + + Lists stored infoTypes. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.ListStoredInfoTypesRequest], + Awaitable[~.ListStoredInfoTypesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'list_stored_info_types' not in self._stubs: + self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', + request_serializer=dlp.ListStoredInfoTypesRequest.serialize, + response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, + ) + return self._stubs['list_stored_info_types'] + + @property + def delete_stored_info_type(self) -> Callable[ + [dlp.DeleteStoredInfoTypeRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete stored info type method over gRPC. + + Deletes a stored infoType. + See https://cloud.google.com/dlp/docs/creating-stored- + infotypes to learn more. + + Returns: + Callable[[~.DeleteStoredInfoTypeRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_stored_info_type' not in self._stubs: + self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', + request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_stored_info_type'] + + @property + def hybrid_inspect_dlp_job(self) -> Callable[ + [dlp.HybridInspectDlpJobRequest], + Awaitable[dlp.HybridInspectResponse]]: + r"""Return a callable for the hybrid inspect dlp job method over gRPC. + + Inspect hybrid content and store findings to a job. + To review the findings inspect the job. Inspection will + occur asynchronously. + Early access feature is in a pre-release state and might + change or have limited support. For more information, + see + https://cloud.google.com/products#product-launch-stages. + + Returns: + Callable[[~.HybridInspectDlpJobRequest], + Awaitable[~.HybridInspectResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'hybrid_inspect_dlp_job' not in self._stubs: + self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', + request_serializer=dlp.HybridInspectDlpJobRequest.serialize, + response_deserializer=dlp.HybridInspectResponse.deserialize, + ) + return self._stubs['hybrid_inspect_dlp_job'] + + @property + def finish_dlp_job(self) -> Callable[ + [dlp.FinishDlpJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the finish dlp job method over gRPC. + + Finish a running hybrid DlpJob. Triggers the + finalization steps and running of any enabled actions + that have not yet run. Early access feature is in a pre- + release state and might change or have limited support. + For more information, see + https://cloud.google.com/products#product-launch-stages. + + Returns: + Callable[[~.FinishDlpJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
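+        # The fully-qualified method path below follows the usual gRPC
+        # convention: /<proto package>.<Service>/<Method>.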
+ if 'finish_dlp_job' not in self._stubs: + self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( + '/google.privacy.dlp.v2.DlpService/FinishDlpJob', + request_serializer=dlp.FinishDlpJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['finish_dlp_job'] + + +__all__ = ( + 'DlpServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py new file mode 100644 index 00000000..56197758 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py @@ -0,0 +1,332 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .dlp import ( + Action, + ActivateJobTriggerRequest, + AnalyzeDataSourceRiskDetails, + BoundingBox, + BucketingConfig, + ByteContentItem, + CancelDlpJobRequest, + CharacterMaskConfig, + CharsToIgnore, + Color, + Container, + ContentItem, + ContentLocation, + CreateDeidentifyTemplateRequest, + CreateDlpJobRequest, + CreateInspectTemplateRequest, + CreateJobTriggerRequest, + CreateStoredInfoTypeRequest, + CryptoDeterministicConfig, + CryptoHashConfig, + CryptoKey, + CryptoReplaceFfxFpeConfig, + DateShiftConfig, + DateTime, + DeidentifyConfig, + DeidentifyContentRequest, + DeidentifyContentResponse, + DeidentifyTemplate, + DeleteDeidentifyTemplateRequest, + DeleteDlpJobRequest, + DeleteInspectTemplateRequest, + DeleteJobTriggerRequest, + DeleteStoredInfoTypeRequest, + DlpJob, + DocumentLocation, + Error, + ExcludeInfoTypes, + ExclusionRule, + FieldTransformation, + Finding, + FinishDlpJobRequest, + FixedSizeBucketingConfig, + GetDeidentifyTemplateRequest, + GetDlpJobRequest, + GetInspectTemplateRequest, + GetJobTriggerRequest, + GetStoredInfoTypeRequest, + HybridContentItem, + HybridFindingDetails, + HybridInspectDlpJobRequest, + HybridInspectJobTriggerRequest, + HybridInspectResponse, + HybridInspectStatistics, + ImageLocation, + InfoTypeDescription, + InfoTypeStats, + InfoTypeTransformations, + InspectConfig, + InspectContentRequest, + InspectContentResponse, + InspectDataSourceDetails, + InspectionRule, + InspectionRuleSet, + InspectJobConfig, + InspectResult, + InspectTemplate, + JobTrigger, + KmsWrappedCryptoKey, + LargeCustomDictionaryConfig, + LargeCustomDictionaryStats, + ListDeidentifyTemplatesRequest, + ListDeidentifyTemplatesResponse, + ListDlpJobsRequest, + ListDlpJobsResponse, + ListInfoTypesRequest, + ListInfoTypesResponse, + ListInspectTemplatesRequest, + ListInspectTemplatesResponse, + ListJobTriggersRequest, + ListJobTriggersResponse, + ListStoredInfoTypesRequest, + ListStoredInfoTypesResponse, + Location, + Manual, + MetadataLocation, + OutputStorageConfig, + PrimitiveTransformation, + PrivacyMetric, + QuasiId, + QuoteInfo, + Range, + RecordCondition, + RecordLocation, + RecordSuppression, + RecordTransformations, + RedactConfig, + RedactImageRequest, + RedactImageResponse, + ReidentifyContentRequest, + ReidentifyContentResponse, + 
ReplaceValueConfig, + ReplaceWithInfoTypeConfig, + RiskAnalysisJobConfig, + Schedule, + StatisticalTable, + StorageMetadataLabel, + StoredInfoType, + StoredInfoTypeConfig, + StoredInfoTypeStats, + StoredInfoTypeVersion, + Table, + TableLocation, + TimePartConfig, + TransformationErrorHandling, + TransformationOverview, + TransformationSummary, + TransientCryptoKey, + UnwrappedCryptoKey, + UpdateDeidentifyTemplateRequest, + UpdateInspectTemplateRequest, + UpdateJobTriggerRequest, + UpdateStoredInfoTypeRequest, + Value, + ValueFrequency, + ContentOption, + DlpJobType, + InfoTypeSupportedBy, + MatchingType, + MetadataType, + RelationalOperator, + StoredInfoTypeState, +) +from .storage import ( + BigQueryField, + BigQueryKey, + BigQueryOptions, + BigQueryTable, + CloudStorageFileSet, + CloudStorageOptions, + CloudStoragePath, + CloudStorageRegexFileSet, + CustomInfoType, + DatastoreKey, + DatastoreOptions, + EntityId, + FieldId, + HybridOptions, + InfoType, + Key, + KindExpression, + PartitionId, + RecordKey, + StorageConfig, + StoredType, + TableOptions, + FileType, + Likelihood, +) + +__all__ = ( + 'Action', + 'ActivateJobTriggerRequest', + 'AnalyzeDataSourceRiskDetails', + 'BoundingBox', + 'BucketingConfig', + 'ByteContentItem', + 'CancelDlpJobRequest', + 'CharacterMaskConfig', + 'CharsToIgnore', + 'Color', + 'Container', + 'ContentItem', + 'ContentLocation', + 'CreateDeidentifyTemplateRequest', + 'CreateDlpJobRequest', + 'CreateInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'CreateStoredInfoTypeRequest', + 'CryptoDeterministicConfig', + 'CryptoHashConfig', + 'CryptoKey', + 'CryptoReplaceFfxFpeConfig', + 'DateShiftConfig', + 'DateTime', + 'DeidentifyConfig', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'DeidentifyTemplate', + 'DeleteDeidentifyTemplateRequest', + 'DeleteDlpJobRequest', + 'DeleteInspectTemplateRequest', + 'DeleteJobTriggerRequest', + 'DeleteStoredInfoTypeRequest', + 'DlpJob', + 'DocumentLocation', + 'Error', + 'ExcludeInfoTypes', + 'ExclusionRule', + 'FieldTransformation', + 'Finding', + 'FinishDlpJobRequest', + 'FixedSizeBucketingConfig', + 'GetDeidentifyTemplateRequest', + 'GetDlpJobRequest', + 'GetInspectTemplateRequest', + 'GetJobTriggerRequest', + 'GetStoredInfoTypeRequest', + 'HybridContentItem', + 'HybridFindingDetails', + 'HybridInspectDlpJobRequest', + 'HybridInspectJobTriggerRequest', + 'HybridInspectResponse', + 'HybridInspectStatistics', + 'ImageLocation', + 'InfoTypeDescription', + 'InfoTypeStats', + 'InfoTypeTransformations', + 'InspectConfig', + 'InspectContentRequest', + 'InspectContentResponse', + 'InspectDataSourceDetails', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectJobConfig', + 'InspectResult', + 'InspectTemplate', + 'JobTrigger', + 'KmsWrappedCryptoKey', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 'ListDeidentifyTemplatesRequest', + 'ListDeidentifyTemplatesResponse', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'ListStoredInfoTypesRequest', + 'ListStoredInfoTypesResponse', + 'Location', + 'Manual', + 'MetadataLocation', + 'OutputStorageConfig', + 'PrimitiveTransformation', + 'PrivacyMetric', + 'QuasiId', + 'QuoteInfo', + 'Range', + 'RecordCondition', + 'RecordLocation', + 'RecordSuppression', + 'RecordTransformations', + 'RedactConfig', + 'RedactImageRequest', + 'RedactImageResponse', + 'ReidentifyContentRequest', + 
'ReidentifyContentResponse', + 'ReplaceValueConfig', + 'ReplaceWithInfoTypeConfig', + 'RiskAnalysisJobConfig', + 'Schedule', + 'StatisticalTable', + 'StorageMetadataLabel', + 'StoredInfoType', + 'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'Table', + 'TableLocation', + 'TimePartConfig', + 'TransformationErrorHandling', + 'TransformationOverview', + 'TransformationSummary', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'UpdateDeidentifyTemplateRequest', + 'UpdateInspectTemplateRequest', + 'UpdateJobTriggerRequest', + 'UpdateStoredInfoTypeRequest', + 'Value', + 'ValueFrequency', + 'ContentOption', + 'DlpJobType', + 'InfoTypeSupportedBy', + 'MatchingType', + 'MetadataType', + 'RelationalOperator', + 'StoredInfoTypeState', + 'BigQueryField', + 'BigQueryKey', + 'BigQueryOptions', + 'BigQueryTable', + 'CloudStorageFileSet', + 'CloudStorageOptions', + 'CloudStoragePath', + 'CloudStorageRegexFileSet', + 'CustomInfoType', + 'DatastoreKey', + 'DatastoreOptions', + 'EntityId', + 'FieldId', + 'HybridOptions', + 'InfoType', + 'Key', + 'KindExpression', + 'PartitionId', + 'RecordKey', + 'StorageConfig', + 'StoredType', + 'TableOptions', + 'FileType', + 'Likelihood', +) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py new file mode 100644 index 00000000..d97d7b8c --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py @@ -0,0 +1,6338 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.cloud.dlp_v2.types import storage +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import empty_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.privacy.dlp.v2', + manifest={ + 'RelationalOperator', + 'MatchingType', + 'ContentOption', + 'MetadataType', + 'InfoTypeSupportedBy', + 'DlpJobType', + 'StoredInfoTypeState', + 'ExcludeInfoTypes', + 'ExclusionRule', + 'InspectionRule', + 'InspectionRuleSet', + 'InspectConfig', + 'ByteContentItem', + 'ContentItem', + 'Table', + 'InspectResult', + 'Finding', + 'Location', + 'ContentLocation', + 'MetadataLocation', + 'StorageMetadataLabel', + 'DocumentLocation', + 'RecordLocation', + 'TableLocation', + 'Container', + 'Range', + 'ImageLocation', + 'BoundingBox', + 'RedactImageRequest', + 'Color', + 'RedactImageResponse', + 'DeidentifyContentRequest', + 'DeidentifyContentResponse', + 'ReidentifyContentRequest', + 'ReidentifyContentResponse', + 'InspectContentRequest', + 'InspectContentResponse', + 'OutputStorageConfig', + 'InfoTypeStats', + 'InspectDataSourceDetails', + 'HybridInspectStatistics', + 'InfoTypeDescription', + 'ListInfoTypesRequest', + 'ListInfoTypesResponse', + 'RiskAnalysisJobConfig', + 'QuasiId', + 'StatisticalTable', + 'PrivacyMetric', + 'AnalyzeDataSourceRiskDetails', + 'ValueFrequency', + 'Value', + 'QuoteInfo', + 'DateTime', + 'DeidentifyConfig', + 'TransformationErrorHandling', + 'PrimitiveTransformation', + 'TimePartConfig', + 'CryptoHashConfig', + 'CryptoDeterministicConfig', + 'ReplaceValueConfig', + 'ReplaceWithInfoTypeConfig', + 'RedactConfig', + 'CharsToIgnore', + 'CharacterMaskConfig', + 'FixedSizeBucketingConfig', + 'BucketingConfig', + 'CryptoReplaceFfxFpeConfig', + 'CryptoKey', + 'TransientCryptoKey', + 'UnwrappedCryptoKey', + 'KmsWrappedCryptoKey', + 'DateShiftConfig', + 'InfoTypeTransformations', + 'FieldTransformation', + 'RecordTransformations', + 'RecordSuppression', + 'RecordCondition', + 'TransformationOverview', + 'TransformationSummary', + 'Schedule', + 'Manual', + 'InspectTemplate', + 'DeidentifyTemplate', + 'Error', + 'JobTrigger', + 'Action', + 'CreateInspectTemplateRequest', + 'UpdateInspectTemplateRequest', + 'GetInspectTemplateRequest', + 'ListInspectTemplatesRequest', + 'ListInspectTemplatesResponse', + 'DeleteInspectTemplateRequest', + 'CreateJobTriggerRequest', + 'ActivateJobTriggerRequest', + 'UpdateJobTriggerRequest', + 'GetJobTriggerRequest', + 'CreateDlpJobRequest', + 'ListJobTriggersRequest', + 'ListJobTriggersResponse', + 'DeleteJobTriggerRequest', + 'InspectJobConfig', + 'DlpJob', + 'GetDlpJobRequest', + 'ListDlpJobsRequest', + 'ListDlpJobsResponse', + 'CancelDlpJobRequest', + 'FinishDlpJobRequest', + 'DeleteDlpJobRequest', + 'CreateDeidentifyTemplateRequest', + 'UpdateDeidentifyTemplateRequest', + 'GetDeidentifyTemplateRequest', + 'ListDeidentifyTemplatesRequest', + 'ListDeidentifyTemplatesResponse', + 'DeleteDeidentifyTemplateRequest', + 'LargeCustomDictionaryConfig', + 'LargeCustomDictionaryStats', + 'StoredInfoTypeConfig', + 'StoredInfoTypeStats', + 'StoredInfoTypeVersion', + 'StoredInfoType', + 'CreateStoredInfoTypeRequest', + 'UpdateStoredInfoTypeRequest', + 
'GetStoredInfoTypeRequest',
+        'ListStoredInfoTypesRequest',
+        'ListStoredInfoTypesResponse',
+        'DeleteStoredInfoTypeRequest',
+        'HybridInspectJobTriggerRequest',
+        'HybridInspectDlpJobRequest',
+        'HybridContentItem',
+        'HybridFindingDetails',
+        'HybridInspectResponse',
+    },
+)
+
+
+class RelationalOperator(proto.Enum):
+    r"""Operators available for comparing the value of fields."""
+    RELATIONAL_OPERATOR_UNSPECIFIED = 0
+    EQUAL_TO = 1
+    NOT_EQUAL_TO = 2
+    GREATER_THAN = 3
+    LESS_THAN = 4
+    GREATER_THAN_OR_EQUALS = 5
+    LESS_THAN_OR_EQUALS = 6
+    EXISTS = 7
+
+
+class MatchingType(proto.Enum):
+    r"""Type of the match which can be applied to different ways of
+    matching, like Dictionary, regular expression and intersecting
+    with findings of another info type.
+    """
+    MATCHING_TYPE_UNSPECIFIED = 0
+    MATCHING_TYPE_FULL_MATCH = 1
+    MATCHING_TYPE_PARTIAL_MATCH = 2
+    MATCHING_TYPE_INVERSE_MATCH = 3
+
+
+class ContentOption(proto.Enum):
+    r"""Options describing which parts of the provided content should
+    be scanned.
+    """
+    CONTENT_UNSPECIFIED = 0
+    CONTENT_TEXT = 1
+    CONTENT_IMAGE = 2
+
+
+class MetadataType(proto.Enum):
+    r"""Type of metadata containing the finding."""
+    METADATATYPE_UNSPECIFIED = 0
+    STORAGE_METADATA = 2
+
+
+class InfoTypeSupportedBy(proto.Enum):
+    r"""Parts of the APIs which use certain infoTypes."""
+    ENUM_TYPE_UNSPECIFIED = 0
+    INSPECT = 1
+    RISK_ANALYSIS = 2
+
+
+class DlpJobType(proto.Enum):
+    r"""An enum to represent the various types of DLP jobs."""
+    DLP_JOB_TYPE_UNSPECIFIED = 0
+    INSPECT_JOB = 1
+    RISK_ANALYSIS_JOB = 2
+
+
+class StoredInfoTypeState(proto.Enum):
+    r"""State of a StoredInfoType version."""
+    STORED_INFO_TYPE_STATE_UNSPECIFIED = 0
+    PENDING = 1
+    READY = 2
+    FAILED = 3
+    INVALID = 4
+
+
+class ExcludeInfoTypes(proto.Message):
+    r"""List of exclude infoTypes.
+    Attributes:
+        info_types (Sequence[google.cloud.dlp_v2.types.InfoType]):
+            InfoType list in an ExclusionRule. A finding is dropped when
+            it overlaps with, or is contained within, a finding of an
+            infoType from this list. For example, if
+            ``InspectionRuleSet.info_types`` contains "PHONE_NUMBER" and
+            the ``exclusion_rule`` contains
+            ``exclude_info_types.info_types`` with "EMAIL_ADDRESS", the
+            phone number findings are dropped if they overlap with an
+            EMAIL_ADDRESS finding. As a result,
+            "555-222-2222@example.org" generates only a single finding,
+            namely the email address.
+    """
+
+    info_types = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=storage.InfoType,
+    )
+
+
+class ExclusionRule(proto.Message):
+    r"""The rule that specifies conditions when findings of infoTypes
+    specified in ``InspectionRuleSet`` are removed from results.
+
+    Attributes:
+        dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary):
+            Dictionary which defines the rule.
+        regex (google.cloud.dlp_v2.types.CustomInfoType.Regex):
+            Regular expression which defines the rule.
+        exclude_info_types (google.cloud.dlp_v2.types.ExcludeInfoTypes):
+            Set of infoTypes for which findings would
+            affect this rule.
+        matching_type (google.cloud.dlp_v2.types.MatchingType):
+            How the rule is applied; see MatchingType
+            documentation for details.
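+
+    As an illustration, a ``regex`` rule with the pattern
+    ``.+@example\.com`` and ``MATCHING_TYPE_FULL_MATCH`` drops any
+    finding whose full content matches that pattern, such as email
+    addresses ending in ``@example.com``.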
+ """ + + dictionary = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.CustomInfoType.Dictionary, + ) + regex = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=storage.CustomInfoType.Regex, + ) + exclude_info_types = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='ExcludeInfoTypes', + ) + matching_type = proto.Field( + proto.ENUM, + number=4, + enum='MatchingType', + ) + + +class InspectionRule(proto.Message): + r"""A single inspection rule to be applied to infoTypes, specified in + ``InspectionRuleSet``. + + Attributes: + hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): + Hotword-based detection rule. + exclusion_rule (google.cloud.dlp_v2.types.ExclusionRule): + Exclusion rule. + """ + + hotword_rule = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.CustomInfoType.DetectionRule.HotwordRule, + ) + exclusion_rule = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='ExclusionRule', + ) + + +class InspectionRuleSet(proto.Message): + r"""Rule set for modifying a set of infoTypes to alter behavior + under certain circumstances, depending on the specific details + of the rules within the set. + + Attributes: + info_types (Sequence[google.cloud.dlp_v2.types.InfoType]): + List of infoTypes this rule set is applied + to. + rules (Sequence[google.cloud.dlp_v2.types.InspectionRule]): + Set of rules to be applied to infoTypes. The + rules are applied in order. + """ + + info_types = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + rules = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='InspectionRule', + ) + + +class InspectConfig(proto.Message): + r"""Configuration description of the scanning process. When used with + redactContent only info_types and min_likelihood are currently used. + + Attributes: + info_types (Sequence[google.cloud.dlp_v2.types.InfoType]): + Restricts what info_types to look for. The values must + correspond to InfoType values returned by ListInfoTypes or + listed at + https://cloud.google.com/dlp/docs/infotypes-reference. + + When no InfoTypes or CustomInfoTypes are specified in a + request, the system may automatically choose what detectors + to run. By default this may be all types, but may change + over time as detectors are updated. + + If you need precise control and predictability as to what + detectors are run you should specify specific InfoTypes + listed in the reference, otherwise a default list will be + used, which may change over time. + min_likelihood (google.cloud.dlp_v2.types.Likelihood): + Only returns findings equal or above this + threshold. The default is POSSIBLE. + See https://cloud.google.com/dlp/docs/likelihood + to learn more. + limits (google.cloud.dlp_v2.types.InspectConfig.FindingLimits): + Configuration to control the number of + findings returned. + include_quote (bool): + When true, a contextual quote from the data + that triggered a finding is included in the + response; see Finding.quote. + exclude_info_types (bool): + When true, excludes type information of the + findings. + custom_info_types (Sequence[google.cloud.dlp_v2.types.CustomInfoType]): + CustomInfoTypes provided by the user. See + https://cloud.google.com/dlp/docs/creating- + custom-infotypes to learn more. + content_options (Sequence[google.cloud.dlp_v2.types.ContentOption]): + List of options defining data content to + scan. If empty, text, images, and other content + will be included. 
+ rule_set (Sequence[google.cloud.dlp_v2.types.InspectionRuleSet]): + Set of rules to apply to the findings for + this InspectConfig. Exclusion rules, contained + in the set are executed in the end, other rules + are executed in the order they are specified for + each info type. + """ + + class FindingLimits(proto.Message): + r"""Configuration to control the number of findings returned. + Attributes: + max_findings_per_item (int): + Max number of findings that will be returned for each item + scanned. When set within ``InspectJobConfig``, the maximum + returned is 2000 regardless if this is set higher. When set + within ``InspectContentRequest``, this field is ignored. + max_findings_per_request (int): + Max number of findings that will be returned per + request/job. When set within ``InspectContentRequest``, the + maximum returned is 2000 regardless if this is set higher. + max_findings_per_info_type (Sequence[google.cloud.dlp_v2.types.InspectConfig.FindingLimits.InfoTypeLimit]): + Configuration of findings limit given for + specified infoTypes. + """ + + class InfoTypeLimit(proto.Message): + r"""Max findings configuration per infoType, per content item or + long running DlpJob. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Type of information the findings limit applies to. Only one + limit per info_type should be provided. If InfoTypeLimit + does not have an info_type, the DLP API applies the limit + against all info_types that are found but not specified in + another InfoTypeLimit. + max_findings (int): + Max findings limit for the given infoType. + """ + + info_type = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + max_findings = proto.Field( + proto.INT32, + number=2, + ) + + max_findings_per_item = proto.Field( + proto.INT32, + number=1, + ) + max_findings_per_request = proto.Field( + proto.INT32, + number=2, + ) + max_findings_per_info_type = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='InspectConfig.FindingLimits.InfoTypeLimit', + ) + + info_types = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + min_likelihood = proto.Field( + proto.ENUM, + number=2, + enum=storage.Likelihood, + ) + limits = proto.Field( + proto.MESSAGE, + number=3, + message=FindingLimits, + ) + include_quote = proto.Field( + proto.BOOL, + number=4, + ) + exclude_info_types = proto.Field( + proto.BOOL, + number=5, + ) + custom_info_types = proto.RepeatedField( + proto.MESSAGE, + number=6, + message=storage.CustomInfoType, + ) + content_options = proto.RepeatedField( + proto.ENUM, + number=8, + enum='ContentOption', + ) + rule_set = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='InspectionRuleSet', + ) + + +class ByteContentItem(proto.Message): + r"""Container for bytes to inspect or redact. + Attributes: + type_ (google.cloud.dlp_v2.types.ByteContentItem.BytesType): + The type of data stored in the bytes string. Default will be + TEXT_UTF8. + data (bytes): + Content data to inspect or redact. + """ + class BytesType(proto.Enum): + r"""The type of data being sent for inspection.""" + BYTES_TYPE_UNSPECIFIED = 0 + IMAGE = 6 + IMAGE_JPEG = 1 + IMAGE_BMP = 2 + IMAGE_PNG = 3 + IMAGE_SVG = 4 + TEXT_UTF8 = 5 + WORD_DOCUMENT = 7 + PDF = 8 + AVRO = 11 + CSV = 12 + TSV = 13 + + type_ = proto.Field( + proto.ENUM, + number=1, + enum=BytesType, + ) + data = proto.Field( + proto.BYTES, + number=2, + ) + + +class ContentItem(proto.Message): + r"""Container structure for the content to inspect. 
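+    Exactly one of ``value``, ``table``, or ``byte_item`` may be set;
+    the three fields form a ``data_item`` oneof.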
+ Attributes: + value (str): + String data to inspect or redact. + table (google.cloud.dlp_v2.types.Table): + Structured content for inspection. See + https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table + to learn more. + byte_item (google.cloud.dlp_v2.types.ByteContentItem): + Content data to inspect or redact. Replaces ``type`` and + ``data``. + """ + + value = proto.Field( + proto.STRING, + number=3, + oneof='data_item', + ) + table = proto.Field( + proto.MESSAGE, + number=4, + oneof='data_item', + message='Table', + ) + byte_item = proto.Field( + proto.MESSAGE, + number=5, + oneof='data_item', + message='ByteContentItem', + ) + + +class Table(proto.Message): + r"""Structured content to inspect. Up to 50,000 ``Value``\ s per request + allowed. See + https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table + to learn more. + + Attributes: + headers (Sequence[google.cloud.dlp_v2.types.FieldId]): + Headers of the table. + rows (Sequence[google.cloud.dlp_v2.types.Table.Row]): + Rows of the table. + """ + + class Row(proto.Message): + r"""Values of the row. + Attributes: + values (Sequence[google.cloud.dlp_v2.types.Value]): + Individual cells. + """ + + values = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + + headers = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + rows = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Row, + ) + + +class InspectResult(proto.Message): + r"""All the findings for a single scanned item. + Attributes: + findings (Sequence[google.cloud.dlp_v2.types.Finding]): + List of findings for an item. + findings_truncated (bool): + If true, then this item might have more + findings than were returned, and the findings + returned are an arbitrary subset of all + findings. The findings list might be truncated + because the input items were too large, or + because the server reached the maximum amount of + resources allowed for a single API call. For + best results, divide the input into smaller + batches. + """ + + findings = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Finding', + ) + findings_truncated = proto.Field( + proto.BOOL, + number=2, + ) + + +class Finding(proto.Message): + r"""Represents a piece of potentially sensitive content. + Attributes: + name (str): + Resource name in format + projects/{project}/locations/{location}/findings/{finding} + Populated only when viewing persisted findings. + quote (str): + The content that was found. Even if the content is not + textual, it may be converted to a textual representation + here. Provided if ``include_quote`` is true and the finding + is less than or equal to 4096 bytes long. If the finding + exceeds 4096 bytes in length, the quote may be omitted. + info_type (google.cloud.dlp_v2.types.InfoType): + The type of content that might have been found. Provided if + ``excluded_types`` is false. + likelihood (google.cloud.dlp_v2.types.Likelihood): + Confidence of how likely it is that the ``info_type`` is + correct. + location (google.cloud.dlp_v2.types.Location): + Where the content was found. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Timestamp when finding was detected. + quote_info (google.cloud.dlp_v2.types.QuoteInfo): + Contains data parsed from quotes. Only populated if + include_quote was set to true and a supported infoType was + requested. Currently supported infoTypes: DATE, + DATE_OF_BIRTH and TIME. + resource_name (str): + The job that stored the finding. 
+ trigger_name (str): + Job trigger name, if applicable, for this + finding. + labels (Sequence[google.cloud.dlp_v2.types.Finding.LabelsEntry]): + The labels associated with this ``Finding``. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + job_create_time (google.protobuf.timestamp_pb2.Timestamp): + Time the job started that produced this + finding. + job_name (str): + The job that stored the finding. + """ + + name = proto.Field( + proto.STRING, + number=14, + ) + quote = proto.Field( + proto.STRING, + number=1, + ) + info_type = proto.Field( + proto.MESSAGE, + number=2, + message=storage.InfoType, + ) + likelihood = proto.Field( + proto.ENUM, + number=3, + enum=storage.Likelihood, + ) + location = proto.Field( + proto.MESSAGE, + number=4, + message='Location', + ) + create_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + quote_info = proto.Field( + proto.MESSAGE, + number=7, + message='QuoteInfo', + ) + resource_name = proto.Field( + proto.STRING, + number=8, + ) + trigger_name = proto.Field( + proto.STRING, + number=9, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=10, + ) + job_create_time = proto.Field( + proto.MESSAGE, + number=11, + message=timestamp_pb2.Timestamp, + ) + job_name = proto.Field( + proto.STRING, + number=13, + ) + + +class Location(proto.Message): + r"""Specifies the location of the finding. + Attributes: + byte_range (google.cloud.dlp_v2.types.Range): + Zero-based byte offsets delimiting the + finding. These are relative to the finding's + containing element. Note that when the content + is not textual, this references the UTF-8 + encoded textual representation of the content. + Omitted if content is an image. + codepoint_range (google.cloud.dlp_v2.types.Range): + Unicode character offsets delimiting the + finding. These are relative to the finding's + containing element. Provided when the content is + text. + content_locations (Sequence[google.cloud.dlp_v2.types.ContentLocation]): + List of nested objects pointing to the + precise location of the finding within the file + or record. + container (google.cloud.dlp_v2.types.Container): + Information about the container where this + finding occurred, if available. + """ + + byte_range = proto.Field( + proto.MESSAGE, + number=1, + message='Range', + ) + codepoint_range = proto.Field( + proto.MESSAGE, + number=2, + message='Range', + ) + content_locations = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='ContentLocation', + ) + container = proto.Field( + proto.MESSAGE, + number=8, + message='Container', + ) + + +class ContentLocation(proto.Message): + r"""Precise location of the finding within a document, record, + image, or metadata container. + + Attributes: + container_name (str): + Name of the container where the finding is located. The top + level name is the source file name or table name. 
Names of + some common storage containers are formatted as follows: + + - BigQuery tables: ``{project_id}:{dataset_id}.{table_id}`` + - Cloud Storage files: ``gs://{bucket}/{path}`` + - Datastore namespace: {namespace} + + Nested names could be absent if the embedded object has no + string identifier (for an example an image contained within + a document). + record_location (google.cloud.dlp_v2.types.RecordLocation): + Location within a row or record of a database + table. + image_location (google.cloud.dlp_v2.types.ImageLocation): + Location within an image's pixels. + document_location (google.cloud.dlp_v2.types.DocumentLocation): + Location data for document files. + metadata_location (google.cloud.dlp_v2.types.MetadataLocation): + Location within the metadata for inspected + content. + container_timestamp (google.protobuf.timestamp_pb2.Timestamp): + Findings container modification timestamp, if applicable. + For Google Cloud Storage contains last file modification + timestamp. For BigQuery table contains last_modified_time + property. For Datastore - not populated. + container_version (str): + Findings container version, if available + ("generation" for Google Cloud Storage). + """ + + container_name = proto.Field( + proto.STRING, + number=1, + ) + record_location = proto.Field( + proto.MESSAGE, + number=2, + oneof='location', + message='RecordLocation', + ) + image_location = proto.Field( + proto.MESSAGE, + number=3, + oneof='location', + message='ImageLocation', + ) + document_location = proto.Field( + proto.MESSAGE, + number=5, + oneof='location', + message='DocumentLocation', + ) + metadata_location = proto.Field( + proto.MESSAGE, + number=8, + oneof='location', + message='MetadataLocation', + ) + container_timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + container_version = proto.Field( + proto.STRING, + number=7, + ) + + +class MetadataLocation(proto.Message): + r"""Metadata Location + Attributes: + type_ (google.cloud.dlp_v2.types.MetadataType): + Type of metadata containing the finding. + storage_label (google.cloud.dlp_v2.types.StorageMetadataLabel): + Storage metadata. + """ + + type_ = proto.Field( + proto.ENUM, + number=1, + enum='MetadataType', + ) + storage_label = proto.Field( + proto.MESSAGE, + number=3, + oneof='label', + message='StorageMetadataLabel', + ) + + +class StorageMetadataLabel(proto.Message): + r"""Storage metadata label to indicate which metadata entry + contains findings. + + Attributes: + key (str): + + """ + + key = proto.Field( + proto.STRING, + number=1, + ) + + +class DocumentLocation(proto.Message): + r"""Location of a finding within a document. + Attributes: + file_offset (int): + Offset of the line, from the beginning of the + file, where the finding is located. + """ + + file_offset = proto.Field( + proto.INT64, + number=1, + ) + + +class RecordLocation(proto.Message): + r"""Location of a finding within a row or record. + Attributes: + record_key (google.cloud.dlp_v2.types.RecordKey): + Key of the finding. + field_id (google.cloud.dlp_v2.types.FieldId): + Field id of the field containing the finding. + table_location (google.cloud.dlp_v2.types.TableLocation): + Location within a ``ContentItem.Table``. 
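+
+    For a table parsed from a CSV file, for example, ``field_id`` names
+    the column containing the finding and ``table_location`` carries its
+    zero-based row index.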
+ """ + + record_key = proto.Field( + proto.MESSAGE, + number=1, + message=storage.RecordKey, + ) + field_id = proto.Field( + proto.MESSAGE, + number=2, + message=storage.FieldId, + ) + table_location = proto.Field( + proto.MESSAGE, + number=3, + message='TableLocation', + ) + + +class TableLocation(proto.Message): + r"""Location of a finding within a table. + Attributes: + row_index (int): + The zero-based index of the row where the finding is + located. Only populated for resources that have a natural + ordering, not BigQuery. In BigQuery, to identify the row a + finding came from, populate + BigQueryOptions.identifying_fields with your primary key + column names and when you store the findings the value of + those columns will be stored inside of Finding. + """ + + row_index = proto.Field( + proto.INT64, + number=1, + ) + + +class Container(proto.Message): + r"""Represents a container that may contain DLP findings. + Examples of a container include a file, table, or database + record. + + Attributes: + type_ (str): + Container type, for example BigQuery or + Google Cloud Storage. + project_id (str): + Project where the finding was found. + Can be different from the project that owns the + finding. + full_path (str): + A string representation of the full container + name. Examples: + - BigQuery: 'Project:DataSetId.TableId' + - Google Cloud Storage: + 'gs://Bucket/folders/filename.txt' + root_path (str): + The root of the container. Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the root is ``dataset_id`` + - For Google Cloud Storage file + ``gs://bucket/folder/filename.txt``, the root is + ``gs://bucket`` + relative_path (str): + The rest of the path after the root. Examples: + + - For BigQuery table ``project_id:dataset_id.table_id``, + the relative path is ``table_id`` + - Google Cloud Storage file + ``gs://bucket/folder/filename.txt``, the relative path is + ``folder/filename.txt`` + update_time (google.protobuf.timestamp_pb2.Timestamp): + Findings container modification timestamp, if applicable. + For Google Cloud Storage contains last file modification + timestamp. For BigQuery table contains last_modified_time + property. For Datastore - not populated. + version (str): + Findings container version, if available + ("generation" for Google Cloud Storage). + """ + + type_ = proto.Field( + proto.STRING, + number=1, + ) + project_id = proto.Field( + proto.STRING, + number=2, + ) + full_path = proto.Field( + proto.STRING, + number=3, + ) + root_path = proto.Field( + proto.STRING, + number=4, + ) + relative_path = proto.Field( + proto.STRING, + number=5, + ) + update_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + version = proto.Field( + proto.STRING, + number=7, + ) + + +class Range(proto.Message): + r"""Generic half-open interval [start, end) + Attributes: + start (int): + Index of the first character of the range + (inclusive). + end (int): + Index of the last character of the range + (exclusive). + """ + + start = proto.Field( + proto.INT64, + number=1, + ) + end = proto.Field( + proto.INT64, + number=2, + ) + + +class ImageLocation(proto.Message): + r"""Location of the finding within an image. + Attributes: + bounding_boxes (Sequence[google.cloud.dlp_v2.types.BoundingBox]): + Bounding boxes locating the pixels within the + image containing the finding. 
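+
+    A single finding may be reported with several boxes, for example
+    when the matched text wraps across lines in the image.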
+ """ + + bounding_boxes = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='BoundingBox', + ) + + +class BoundingBox(proto.Message): + r"""Bounding box encompassing detected text within an image. + Attributes: + top (int): + Top coordinate of the bounding box. (0,0) is + upper left. + left (int): + Left coordinate of the bounding box. (0,0) is + upper left. + width (int): + Width of the bounding box in pixels. + height (int): + Height of the bounding box in pixels. + """ + + top = proto.Field( + proto.INT32, + number=1, + ) + left = proto.Field( + proto.INT32, + number=2, + ) + width = proto.Field( + proto.INT32, + number=3, + ) + height = proto.Field( + proto.INT32, + number=4, + ) + + +class RedactImageRequest(proto.Message): + r"""Request to search for potentially sensitive info in an image + and redact it by covering it with a colored rectangle. + + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + location_id (str): + Deprecated. This field has no effect. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. + image_redaction_configs (Sequence[google.cloud.dlp_v2.types.RedactImageRequest.ImageRedactionConfig]): + The configuration for specifying what content + to redact from images. + include_findings (bool): + Whether the response should include findings + along with the redacted image. + byte_item (google.cloud.dlp_v2.types.ByteContentItem): + The content must be PNG, JPEG, SVG or BMP. + """ + + class ImageRedactionConfig(proto.Message): + r"""Configuration for determining how redaction of images should + occur. + + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + Only one per info_type should be provided per request. If + not specified, and redact_all_text is false, the DLP API + will redact all text that it matches against all info_types + that are found, but not specified in another + ImageRedactionConfig. + redact_all_text (bool): + If true, all text found in the image, regardless whether it + matches an info_type, is redacted. Only one should be + provided. + redaction_color (google.cloud.dlp_v2.types.Color): + The color to use when redacting content from + an image. If not specified, the default is + black. 
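+
+        ``info_type`` and ``redact_all_text`` are alternatives within a
+        ``target`` oneof, so set at most one of them per config.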
+ """ + + info_type = proto.Field( + proto.MESSAGE, + number=1, + oneof='target', + message=storage.InfoType, + ) + redact_all_text = proto.Field( + proto.BOOL, + number=2, + oneof='target', + ) + redaction_color = proto.Field( + proto.MESSAGE, + number=3, + message='Color', + ) + + parent = proto.Field( + proto.STRING, + number=1, + ) + location_id = proto.Field( + proto.STRING, + number=8, + ) + inspect_config = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + image_redaction_configs = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=ImageRedactionConfig, + ) + include_findings = proto.Field( + proto.BOOL, + number=6, + ) + byte_item = proto.Field( + proto.MESSAGE, + number=7, + message='ByteContentItem', + ) + + +class Color(proto.Message): + r"""Represents a color in the RGB color space. + Attributes: + red (float): + The amount of red in the color as a value in the interval + [0, 1]. + green (float): + The amount of green in the color as a value in the interval + [0, 1]. + blue (float): + The amount of blue in the color as a value in the interval + [0, 1]. + """ + + red = proto.Field( + proto.FLOAT, + number=1, + ) + green = proto.Field( + proto.FLOAT, + number=2, + ) + blue = proto.Field( + proto.FLOAT, + number=3, + ) + + +class RedactImageResponse(proto.Message): + r"""Results of redacting an image. + Attributes: + redacted_image (bytes): + The redacted image. The type will be the same + as the original image. + extracted_text (str): + If an image was being inspected and the InspectConfig's + include_quote was set to true, then this field will include + all text, if any, that was found in the image. + inspect_result (google.cloud.dlp_v2.types.InspectResult): + The findings. Populated when include_findings in the request + is true. + """ + + redacted_image = proto.Field( + proto.BYTES, + number=1, + ) + extracted_text = proto.Field( + proto.STRING, + number=2, + ) + inspect_result = proto.Field( + proto.MESSAGE, + number=3, + message='InspectResult', + ) + + +class DeidentifyContentRequest(proto.Message): + r"""Request to de-identify a list of items. + Attributes: + parent (str): + Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + Configuration for the de-identification of the content item. + Items specified here will override the template referenced + by the deidentify_template_name argument. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. Items specified here will + override the template referenced by the + inspect_template_name argument. + item (google.cloud.dlp_v2.types.ContentItem): + The item to de-identify. Will be treated as + text. + inspect_template_name (str): + Template to use. Any configuration directly specified in + inspect_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. 
Singular sub-messages and groups are + recursively merged. + deidentify_template_name (str): + Template to use. Any configuration directly specified in + deidentify_config will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + deidentify_config = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyConfig', + ) + inspect_config = proto.Field( + proto.MESSAGE, + number=3, + message='InspectConfig', + ) + item = proto.Field( + proto.MESSAGE, + number=4, + message='ContentItem', + ) + inspect_template_name = proto.Field( + proto.STRING, + number=5, + ) + deidentify_template_name = proto.Field( + proto.STRING, + number=6, + ) + location_id = proto.Field( + proto.STRING, + number=7, + ) + + +class DeidentifyContentResponse(proto.Message): + r"""Results of de-identifying a ContentItem. + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The de-identified item. + overview (google.cloud.dlp_v2.types.TransformationOverview): + An overview of the changes that were made on the ``item``. + """ + + item = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + overview = proto.Field( + proto.MESSAGE, + number=2, + message='TransformationOverview', + ) + + +class ReidentifyContentRequest(proto.Message): + r"""Request to re-identify an item. + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + reidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): + Configuration for the re-identification of the content item. + This field shares the same proto message type that is used + for de-identification, however its usage here is for the + reversal of the previous de-identification. + Re-identification is performed by examining the + transformations used to de-identify the items and executing + the reverse. This requires that only reversible + transformations be provided here. The reversible + transformations are: + + - ``CryptoDeterministicConfig`` + - ``CryptoReplaceFfxFpeConfig`` + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + Configuration for the inspector. + item (google.cloud.dlp_v2.types.ContentItem): + The item to re-identify. Will be treated as + text. + inspect_template_name (str): + Template to use. Any configuration directly specified in + ``inspect_config`` will override those set in the template. + Singular fields that are set in this request will replace + their corresponding fields in the template. Repeated fields + are appended. Singular sub-messages and groups are + recursively merged. + reidentify_template_name (str): + Template to use. References an instance of + ``DeidentifyTemplate``. 
Any configuration directly specified
+            in ``reidentify_config`` or ``inspect_config`` will override
+            those set in the template. The ``DeidentifyTemplate`` used
+            must include only reversible transformations. Singular
+            fields that are set in this request will replace their
+            corresponding fields in the template. Repeated fields are
+            appended. Singular sub-messages and groups are recursively
+            merged.
+        location_id (str):
+            Deprecated. This field has no effect.
+    """
+
+    parent = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    reidentify_config = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='DeidentifyConfig',
+    )
+    inspect_config = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='InspectConfig',
+    )
+    item = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        message='ContentItem',
+    )
+    inspect_template_name = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+    reidentify_template_name = proto.Field(
+        proto.STRING,
+        number=6,
+    )
+    location_id = proto.Field(
+        proto.STRING,
+        number=7,
+    )
+
+
+class ReidentifyContentResponse(proto.Message):
+    r"""Results of re-identifying an item.
+    Attributes:
+        item (google.cloud.dlp_v2.types.ContentItem):
+            The re-identified item.
+        overview (google.cloud.dlp_v2.types.TransformationOverview):
+            An overview of the changes that were made to the ``item``.
+    """
+
+    item = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='ContentItem',
+    )
+    overview = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='TransformationOverview',
+    )
+
+
+class InspectContentRequest(proto.Message):
+    r"""Request to search for potentially sensitive info in a
+    ContentItem.
+
+    Attributes:
+        parent (str):
+            Parent resource name.
+
+            The format of this value varies depending on whether you
+            have `specified a processing
+            location <https://cloud.google.com/dlp/docs/specifying-location>`__:
+
+            -  Projects scope, location specified:
+               ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+            -  Projects scope, no location specified (defaults to
+               global): ``projects/``\ PROJECT_ID
+
+            The following example ``parent`` string specifies a parent
+            project with the identifier ``example-project``, and
+            specifies the ``europe-west3`` location for processing data:
+
+            ::
+
+                parent=projects/example-project/locations/europe-west3
+        inspect_config (google.cloud.dlp_v2.types.InspectConfig):
+            Configuration for the inspector. What is specified here will
+            override the template referenced by the
+            inspect_template_name argument.
+        item (google.cloud.dlp_v2.types.ContentItem):
+            The item to inspect.
+        inspect_template_name (str):
+            Template to use. Any configuration directly specified in
+            inspect_config will override those set in the template.
+            Singular fields that are set in this request will replace
+            their corresponding fields in the template. Repeated fields
+            are appended. Singular sub-messages and groups are
+            recursively merged.
+        location_id (str):
+            Deprecated. This field has no effect.
+    """
+
+    parent = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    inspect_config = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='InspectConfig',
+    )
+    item = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message='ContentItem',
+    )
+    inspect_template_name = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+    location_id = proto.Field(
+        proto.STRING,
+        number=5,
+    )
+
+
+class InspectContentResponse(proto.Message):
+    r"""Results of inspecting an item.
+    Attributes:
+        result (google.cloud.dlp_v2.types.InspectResult):
+            The findings.
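+
+    A minimal response-handling sketch (the ``client`` and ``request``
+    names are illustrative):
+
+    ::
+
+        response = client.inspect_content(request=request)
+        for finding in response.result.findings:
+            print(finding.info_type.name, finding.likelihood)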
+ """ + + result = proto.Field( + proto.MESSAGE, + number=1, + message='InspectResult', + ) + + +class OutputStorageConfig(proto.Message): + r"""Cloud repository for storing output. + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Store findings in an existing table or a new table in an + existing dataset. If table_id is not set a new one will be + generated for you with the following format: + dlp_googleapis_yyyy_mm_dd_[dlp_job_id]. Pacific timezone + will be used for generating the date details. + + For Inspect, each column in an existing output table must + have the same name, type, and mode of a field in the + ``Finding`` object. + + For Risk, an existing output table should be the output of a + previous Risk analysis job run on the same source table, + with the same privacy metric and quasi-identifiers. Risk + jobs that analyze the same table but compute a different + privacy metric, or use different sets of quasi-identifiers, + cannot store their results in the same table. + output_schema (google.cloud.dlp_v2.types.OutputStorageConfig.OutputSchema): + Schema used for writing the findings for Inspect jobs. This + field is only used for Inspect and must be unspecified for + Risk jobs. Columns are derived from the ``Finding`` object. + If appending to an existing table, any columns from the + predefined schema that are missing will be added. No columns + in the existing table will be deleted. + + If unspecified, then all available columns will be used for + a new table or an (existing) table with no schema, and no + changes will be made to an existing table that has a schema. + Only for use with external storage. + """ + class OutputSchema(proto.Enum): + r"""Predefined schemas for storing findings. + Only for use with external storage. + """ + OUTPUT_SCHEMA_UNSPECIFIED = 0 + BASIC_COLUMNS = 1 + GCS_COLUMNS = 2 + DATASTORE_COLUMNS = 3 + BIG_QUERY_COLUMNS = 4 + ALL_COLUMNS = 5 + + table = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message=storage.BigQueryTable, + ) + output_schema = proto.Field( + proto.ENUM, + number=3, + enum=OutputSchema, + ) + + +class InfoTypeStats(proto.Message): + r"""Statistics regarding a specific InfoType. + Attributes: + info_type (google.cloud.dlp_v2.types.InfoType): + The type of finding this stat is for. + count (int): + Number of findings for this infoType. + """ + + info_type = proto.Field( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + count = proto.Field( + proto.INT64, + number=2, + ) + + +class InspectDataSourceDetails(proto.Message): + r"""The results of an inspect DataSource job. + Attributes: + requested_options (google.cloud.dlp_v2.types.InspectDataSourceDetails.RequestedOptions): + The configuration used for this job. + result (google.cloud.dlp_v2.types.InspectDataSourceDetails.Result): + A summary of the outcome of this inspect job. + """ + + class RequestedOptions(proto.Message): + r"""Snapshot of the inspection configuration. + Attributes: + snapshot_inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + If run with an InspectTemplate, a snapshot of + its state at the time of this run. + job_config (google.cloud.dlp_v2.types.InspectJobConfig): + Inspect config. + """ + + snapshot_inspect_template = proto.Field( + proto.MESSAGE, + number=1, + message='InspectTemplate', + ) + job_config = proto.Field( + proto.MESSAGE, + number=3, + message='InspectJobConfig', + ) + + class Result(proto.Message): + r"""All result fields mentioned below are updated while the job + is processing. 
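+        The ratio of ``processed_bytes`` to ``total_estimated_bytes``
+        can serve as a rough progress indicator while the job runs.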
+ + Attributes: + processed_bytes (int): + Total size in bytes that were processed. + total_estimated_bytes (int): + Estimate of the number of bytes to process. + info_type_stats (Sequence[google.cloud.dlp_v2.types.InfoTypeStats]): + Statistics of how many instances of each info + type were found during inspect job. + hybrid_stats (google.cloud.dlp_v2.types.HybridInspectStatistics): + Statistics related to the processing of + hybrid inspect. Early access feature is in a + pre-release state and might change or have + limited support. For more information, see + https://cloud.google.com/products#product- + launch-stages. + """ + + processed_bytes = proto.Field( + proto.INT64, + number=1, + ) + total_estimated_bytes = proto.Field( + proto.INT64, + number=2, + ) + info_type_stats = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='InfoTypeStats', + ) + hybrid_stats = proto.Field( + proto.MESSAGE, + number=7, + message='HybridInspectStatistics', + ) + + requested_options = proto.Field( + proto.MESSAGE, + number=2, + message=RequestedOptions, + ) + result = proto.Field( + proto.MESSAGE, + number=3, + message=Result, + ) + + +class HybridInspectStatistics(proto.Message): + r"""Statistics related to processing hybrid inspect requests. + Attributes: + processed_count (int): + The number of hybrid inspection requests + processed within this job. + aborted_count (int): + The number of hybrid inspection requests + aborted because the job ran out of quota or was + ended before they could be processed. + pending_count (int): + The number of hybrid requests currently being processed. + Only populated when called via method ``getDlpJob``. A burst + of traffic may cause hybrid inspect requests to be enqueued. + Processing will take place as quickly as possible, but + resource limitations may impact how long a request is + enqueued for. + """ + + processed_count = proto.Field( + proto.INT64, + number=1, + ) + aborted_count = proto.Field( + proto.INT64, + number=2, + ) + pending_count = proto.Field( + proto.INT64, + number=3, + ) + + +class InfoTypeDescription(proto.Message): + r"""InfoType description. + Attributes: + name (str): + Internal name of the infoType. + display_name (str): + Human readable form of the infoType name. + supported_by (Sequence[google.cloud.dlp_v2.types.InfoTypeSupportedBy]): + Which parts of the API supports this + InfoType. + description (str): + Description of the infotype. Translated when + language is provided in the request. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + display_name = proto.Field( + proto.STRING, + number=2, + ) + supported_by = proto.RepeatedField( + proto.ENUM, + number=3, + enum='InfoTypeSupportedBy', + ) + description = proto.Field( + proto.STRING, + number=4, + ) + + +class ListInfoTypesRequest(proto.Message): + r"""Request for the list of infoTypes. + Attributes: + parent (str): + The parent resource name. + + The format of this value is as follows: + + :: + + locations/LOCATION_ID + language_code (str): + BCP-47 language code for localized infoType + friendly names. If omitted, or if localized + strings are not available, en-US strings will be + returned. + filter (str): + filter to only return infoTypes supported by certain parts + of the API. Defaults to supported_by=INSPECT. + location_id (str): + Deprecated. This field has no effect. 
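+
+    An illustrative ``filter`` value that restricts results to
+    infoTypes usable in risk analysis:
+
+    ::
+
+        supported_by=RISK_ANALYSIS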
+ """ + + parent = proto.Field( + proto.STRING, + number=4, + ) + language_code = proto.Field( + proto.STRING, + number=1, + ) + filter = proto.Field( + proto.STRING, + number=2, + ) + location_id = proto.Field( + proto.STRING, + number=3, + ) + + +class ListInfoTypesResponse(proto.Message): + r"""Response to the ListInfoTypes request. + Attributes: + info_types (Sequence[google.cloud.dlp_v2.types.InfoTypeDescription]): + Set of sensitive infoTypes. + """ + + info_types = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='InfoTypeDescription', + ) + + +class RiskAnalysisJobConfig(proto.Message): + r"""Configuration for a risk analysis job. See + https://cloud.google.com/dlp/docs/concepts-risk-analysis to + learn more. + + Attributes: + privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): + Privacy metric to compute. + source_table (google.cloud.dlp_v2.types.BigQueryTable): + Input dataset to compute metrics over. + actions (Sequence[google.cloud.dlp_v2.types.Action]): + Actions to execute at the completion of the + job. Are executed in the order provided. + """ + + privacy_metric = proto.Field( + proto.MESSAGE, + number=1, + message='PrivacyMetric', + ) + source_table = proto.Field( + proto.MESSAGE, + number=2, + message=storage.BigQueryTable, + ) + actions = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='Action', + ) + + +class QuasiId(proto.Message): + r"""A column with a semantic tag attached. + Attributes: + field (google.cloud.dlp_v2.types.FieldId): + Required. Identifies the column. + info_type (google.cloud.dlp_v2.types.InfoType): + A column can be tagged with a InfoType to use the relevant + public dataset as a statistical model of population, if + available. We currently support US ZIP codes, region codes, + ages and genders. To programmatically obtain the list of + supported InfoTypes, use ListInfoTypes with the + supported_by=RISK_ANALYSIS filter. + custom_tag (str): + A column can be tagged with a custom tag. In + this case, the user must indicate an auxiliary + table that contains statistical information on + the possible values of this column (below). + inferred (google.protobuf.empty_pb2.Empty): + If no semantic tag is indicated, we infer the + statistical model from the distribution of + values in the input data + """ + + field = proto.Field( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + info_type = proto.Field( + proto.MESSAGE, + number=2, + oneof='tag', + message=storage.InfoType, + ) + custom_tag = proto.Field( + proto.STRING, + number=3, + oneof='tag', + ) + inferred = proto.Field( + proto.MESSAGE, + number=4, + oneof='tag', + message=empty_pb2.Empty, + ) + + +class StatisticalTable(proto.Message): + r"""An auxiliary table containing statistical information on the + relative frequency of different quasi-identifiers values. It has + one or several quasi-identifiers columns, and one column that + indicates the relative frequency of each quasi-identifier tuple. + If a tuple is present in the data but not in the auxiliary + table, the corresponding relative frequency is assumed to be + zero (and thus, the tuple is highly reidentifiable). + + Attributes: + table (google.cloud.dlp_v2.types.BigQueryTable): + Required. Auxiliary table location. + quasi_ids (Sequence[google.cloud.dlp_v2.types.StatisticalTable.QuasiIdentifierField]): + Required. Quasi-identifier columns. + relative_frequency (google.cloud.dlp_v2.types.FieldId): + Required. 
+            The relative frequency column must
+            contain a floating-point number between 0 and 1
+            (inclusive). Null values are assumed to be zero.
+    """
+
+    class QuasiIdentifierField(proto.Message):
+        r"""A quasi-identifier column has a custom_tag, used to know which
+        column in the data corresponds to which column in the statistical
+        model.
+
+        Attributes:
+            field (google.cloud.dlp_v2.types.FieldId):
+                Identifies the column.
+            custom_tag (str):
+                A column can be tagged with a custom tag. In
+                this case, the user must indicate an auxiliary
+                table that contains statistical information on
+                the possible values of this column (below).
+        """
+
+        field = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message=storage.FieldId,
+        )
+        custom_tag = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+
+    table = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        message=storage.BigQueryTable,
+    )
+    quasi_ids = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message=QuasiIdentifierField,
+    )
+    relative_frequency = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=storage.FieldId,
+    )
+
+
+class PrivacyMetric(proto.Message):
+    r"""Privacy metric to compute for reidentification risk analysis.
+    Attributes:
+        numerical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.NumericalStatsConfig):
+            Numerical stats
+        categorical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.CategoricalStatsConfig):
+            Categorical stats
+        k_anonymity_config (google.cloud.dlp_v2.types.PrivacyMetric.KAnonymityConfig):
+            K-anonymity
+        l_diversity_config (google.cloud.dlp_v2.types.PrivacyMetric.LDiversityConfig):
+            l-diversity
+        k_map_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig):
+            k-map
+        delta_presence_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.DeltaPresenceEstimationConfig):
+            delta-presence
+    """
+
+    class NumericalStatsConfig(proto.Message):
+        r"""Compute numerical stats over an individual column, including
+        min, max, and quantiles.
+
+        Attributes:
+            field (google.cloud.dlp_v2.types.FieldId):
+                Field to compute numerical stats on.
+                Supported types are integer, float, date,
+                datetime, timestamp, time.
+        """
+
+        field = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message=storage.FieldId,
+        )
+
+    class CategoricalStatsConfig(proto.Message):
+        r"""Compute categorical stats over an individual column, including
+        number of distinct values and value count distribution.
+
+        Attributes:
+            field (google.cloud.dlp_v2.types.FieldId):
+                Field to compute categorical stats on. All
+                column types are supported except for arrays and
+                structs. However, it may be more informative to
+                use NumericalStats when the field type is
+                supported, depending on the data.
+        """
+
+        field = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message=storage.FieldId,
+        )
+
+    class KAnonymityConfig(proto.Message):
+        r"""k-anonymity metric, used for analysis of reidentification
+        risk.
+
+        Attributes:
+            quasi_ids (Sequence[google.cloud.dlp_v2.types.FieldId]):
+                Set of fields to compute k-anonymity over.
+                When multiple fields are specified, they are
+                considered a single composite key. Structs and
+                repeated data types are not supported; however,
+                nested fields are supported so long as they are
+                not structs themselves or nested within a
+                repeated field.
+            entity_id (google.cloud.dlp_v2.types.EntityId):
+                Message indicating that multiple rows might be associated to
+                a single individual.
+                If the same entity_id is associated to
+                multiple quasi-identifier tuples over distinct rows, we
+                consider the entire collection of tuples as the composite
+                quasi-identifier. This collection is a multiset: the order
+                in which the different tuples appear in the dataset is
+                ignored, but their frequency is taken into account.
+
+                Important note: a maximum of 1000 rows can be associated to
+                a single entity ID. If more rows are associated with the
+                same entity ID, some might be ignored.
+        """
+
+        quasi_ids = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message=storage.FieldId,
+        )
+        entity_id = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            message=storage.EntityId,
+        )
+
+    class LDiversityConfig(proto.Message):
+        r"""l-diversity metric, used for analysis of reidentification
+        risk.
+
+        Attributes:
+            quasi_ids (Sequence[google.cloud.dlp_v2.types.FieldId]):
+                Set of quasi-identifiers indicating how
+                equivalence classes are defined for the
+                l-diversity computation. When multiple fields
+                are specified, they are considered a single
+                composite key.
+            sensitive_attribute (google.cloud.dlp_v2.types.FieldId):
+                Sensitive field for computing the l-value.
+        """
+
+        quasi_ids = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message=storage.FieldId,
+        )
+        sensitive_attribute = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            message=storage.FieldId,
+        )
+
+    class KMapEstimationConfig(proto.Message):
+        r"""Reidentifiability metric. This corresponds to a risk model
+        similar to what is called "journalist risk" in the literature,
+        except the attack dataset is statistically modeled instead of
+        being perfectly known. This can be done using publicly available
+        data (like the US Census), or using a custom statistical model
+        (indicated as one or several BigQuery tables), or by
+        extrapolating from the distribution of values in the input
+        dataset.
+
+        Attributes:
+            quasi_ids (Sequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.TaggedField]):
+                Required. Fields considered to be
+                quasi-identifiers. No two columns can have the
+                same tag.
+            region_code (str):
+                ISO 3166-1 alpha-2 region code to use in the statistical
+                modeling. Set if no column is tagged with a region-specific
+                InfoType (like US_ZIP_5) or a region code.
+            auxiliary_tables (Sequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable]):
+                Several auxiliary tables can be used in the analysis. Each
+                custom_tag used to tag a quasi-identifier column must
+                appear in exactly one column of one auxiliary table.
+        """
+
+        class TaggedField(proto.Message):
+            r"""A column with a semantic tag attached.
+            Attributes:
+                field (google.cloud.dlp_v2.types.FieldId):
+                    Required. Identifies the column.
+                info_type (google.cloud.dlp_v2.types.InfoType):
+                    A column can be tagged with an InfoType to use the relevant
+                    public dataset as a statistical model of the population, if
+                    available. We currently support US ZIP codes, region codes,
+                    ages, and genders. To programmatically obtain the list of
+                    supported InfoTypes, use ListInfoTypes with the
+                    supported_by=RISK_ANALYSIS filter.
+                custom_tag (str):
+                    A column can be tagged with a custom tag. In
+                    this case, the user must indicate an auxiliary
+                    table that contains statistical information on
+                    the possible values of this column (below).
+                inferred (google.protobuf.empty_pb2.Empty):
+                    If no semantic tag is indicated, we infer the
+                    statistical model from the distribution of
+                    values in the input data.
+            """
+
+            field = proto.Field(
+                proto.MESSAGE,
+                number=1,
+                message=storage.FieldId,
+            )
+            info_type = proto.Field(
+                proto.MESSAGE,
+                number=2,
+                oneof='tag',
+                message=storage.InfoType,
+            )
+            custom_tag = proto.Field(
+                proto.STRING,
+                number=3,
+                oneof='tag',
+            )
+            inferred = proto.Field(
+                proto.MESSAGE,
+                number=4,
+                oneof='tag',
+                message=empty_pb2.Empty,
+            )
+
+        class AuxiliaryTable(proto.Message):
+            r"""An auxiliary table contains statistical information on the
+            relative frequency of different quasi-identifier values. It has
+            one or several quasi-identifier columns, and one column that
+            indicates the relative frequency of each quasi-identifier tuple.
+            If a tuple is present in the data but not in the auxiliary
+            table, the corresponding relative frequency is assumed to be
+            zero (and thus, the tuple is highly reidentifiable).
+
+            Attributes:
+                table (google.cloud.dlp_v2.types.BigQueryTable):
+                    Required. Auxiliary table location.
+                quasi_ids (Sequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField]):
+                    Required. Quasi-identifier columns.
+                relative_frequency (google.cloud.dlp_v2.types.FieldId):
+                    Required. The relative frequency column must
+                    contain a floating-point number between 0 and 1
+                    (inclusive). Null values are assumed to be zero.
+            """
+
+            class QuasiIdField(proto.Message):
+                r"""A quasi-identifier column has a custom_tag, used to know which
+                column in the data corresponds to which column in the statistical
+                model.
+
+                Attributes:
+                    field (google.cloud.dlp_v2.types.FieldId):
+                        Identifies the column.
+                    custom_tag (str):
+                        An auxiliary field.
+                """
+
+                field = proto.Field(
+                    proto.MESSAGE,
+                    number=1,
+                    message=storage.FieldId,
+                )
+                custom_tag = proto.Field(
+                    proto.STRING,
+                    number=2,
+                )
+
+            table = proto.Field(
+                proto.MESSAGE,
+                number=3,
+                message=storage.BigQueryTable,
+            )
+            quasi_ids = proto.RepeatedField(
+                proto.MESSAGE,
+                number=1,
+                message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField',
+            )
+            relative_frequency = proto.Field(
+                proto.MESSAGE,
+                number=2,
+                message=storage.FieldId,
+            )
+
+        quasi_ids = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message='PrivacyMetric.KMapEstimationConfig.TaggedField',
+        )
+        region_code = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+        auxiliary_tables = proto.RepeatedField(
+            proto.MESSAGE,
+            number=3,
+            message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable',
+        )
+
+    class DeltaPresenceEstimationConfig(proto.Message):
+        r"""δ-presence metric, used to estimate how likely it is for an
+        attacker to figure out that one given individual appears in a
+        de-identified dataset. Similarly to the k-map metric, we cannot
+        compute δ-presence exactly without knowing the attack dataset,
+        so we use a statistical model instead.
+
+        Attributes:
+            quasi_ids (Sequence[google.cloud.dlp_v2.types.QuasiId]):
+                Required. Fields considered to be
+                quasi-identifiers. No two fields can have the
+                same tag.
+            region_code (str):
+                ISO 3166-1 alpha-2 region code to use in the statistical
+                modeling. Set if no column is tagged with a region-specific
+                InfoType (like US_ZIP_5) or a region code.
+            auxiliary_tables (Sequence[google.cloud.dlp_v2.types.StatisticalTable]):
+                Several auxiliary tables can be used in the analysis.
+                Each custom_tag used to tag a quasi-identifier field must
+                appear in exactly one field of one auxiliary table.
+        """
+
+        quasi_ids = proto.RepeatedField(
+            proto.MESSAGE,
+            number=1,
+            message='QuasiId',
+        )
+        region_code = proto.Field(
+            proto.STRING,
+            number=2,
+        )
+        auxiliary_tables = proto.RepeatedField(
+            proto.MESSAGE,
+            number=3,
+            message='StatisticalTable',
+        )
+
+    numerical_stats_config = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof='type',
+        message=NumericalStatsConfig,
+    )
+    categorical_stats_config = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='type',
+        message=CategoricalStatsConfig,
+    )
+    k_anonymity_config = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof='type',
+        message=KAnonymityConfig,
+    )
+    l_diversity_config = proto.Field(
+        proto.MESSAGE,
+        number=4,
+        oneof='type',
+        message=LDiversityConfig,
+    )
+    k_map_estimation_config = proto.Field(
+        proto.MESSAGE,
+        number=5,
+        oneof='type',
+        message=KMapEstimationConfig,
+    )
+    delta_presence_estimation_config = proto.Field(
+        proto.MESSAGE,
+        number=6,
+        oneof='type',
+        message=DeltaPresenceEstimationConfig,
+    )
+
+
+class AnalyzeDataSourceRiskDetails(proto.Message):
+    r"""Result of a risk analysis operation request.
+    Attributes:
+        requested_privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric):
+            Privacy metric to compute.
+        requested_source_table (google.cloud.dlp_v2.types.BigQueryTable):
+            Input dataset to compute metrics over.
+        numerical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.NumericalStatsResult):
+            Numerical stats result
+        categorical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult):
+            Categorical stats result
+        k_anonymity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult):
+            K-anonymity result
+        l_diversity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult):
+            L-diversity result
+        k_map_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult):
+            K-map result
+        delta_presence_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult):
+            Delta-presence result
+        requested_options (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.RequestedRiskAnalysisOptions):
+            The configuration used for this job.
+    """
+
+    class NumericalStatsResult(proto.Message):
+        r"""Result of the numerical stats computation.
+        Attributes:
+            min_value (google.cloud.dlp_v2.types.Value):
+                Minimum value appearing in the column.
+            max_value (google.cloud.dlp_v2.types.Value):
+                Maximum value appearing in the column.
+            quantile_values (Sequence[google.cloud.dlp_v2.types.Value]):
+                List of 99 values that partition the set of
+                field values into 100 equal sized buckets.
+        """
+
+        min_value = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            message='Value',
+        )
+        max_value = proto.Field(
+            proto.MESSAGE,
+            number=2,
+            message='Value',
+        )
+        quantile_values = proto.RepeatedField(
+            proto.MESSAGE,
+            number=4,
+            message='Value',
+        )
+
+    class CategoricalStatsResult(proto.Message):
+        r"""Result of the categorical stats computation.
+        Attributes:
+            value_frequency_histogram_buckets (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket]):
+                Histogram of value frequencies in the column.
+        """
+
+        class CategoricalStatsHistogramBucket(proto.Message):
+            r"""Histogram of value frequencies in the column.
+            Attributes:
+                value_frequency_lower_bound (int):
+                    Lower bound on the value frequency of the
+                    values in this bucket.
+                value_frequency_upper_bound (int):
+                    Upper bound on the value frequency of the
+                    values in this bucket.
+                bucket_size (int):
+                    Total number of values in this bucket.
+                bucket_values (Sequence[google.cloud.dlp_v2.types.ValueFrequency]):
+                    Sample of value frequencies in this bucket.
+                    The total number of values returned per bucket
+                    is capped at 20.
+                bucket_value_count (int):
+                    Total number of distinct values in this
+                    bucket.
+            """
+
+            value_frequency_lower_bound = proto.Field(
+                proto.INT64,
+                number=1,
+            )
+            value_frequency_upper_bound = proto.Field(
+                proto.INT64,
+                number=2,
+            )
+            bucket_size = proto.Field(
+                proto.INT64,
+                number=3,
+            )
+            bucket_values = proto.RepeatedField(
+                proto.MESSAGE,
+                number=4,
+                message='ValueFrequency',
+            )
+            bucket_value_count = proto.Field(
+                proto.INT64,
+                number=5,
+            )
+
+        value_frequency_histogram_buckets = proto.RepeatedField(
+            proto.MESSAGE,
+            number=5,
+            message='AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket',
+        )
+
+    class KAnonymityResult(proto.Message):
+        r"""Result of the k-anonymity computation.
+        Attributes:
+            equivalence_class_histogram_buckets (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket]):
+                Histogram of k-anonymity equivalence classes.
+        """
+
+        class KAnonymityEquivalenceClass(proto.Message):
+            r"""The set of columns' values that share the same k-anonymity
+            value.
+
+            Attributes:
+                quasi_ids_values (Sequence[google.cloud.dlp_v2.types.Value]):
+                    Set of values defining the equivalence class.
+                    One value per quasi-identifier column in the
+                    original KAnonymity metric message. The order is
+                    always the same as the original request.
+                equivalence_class_size (int):
+                    Size of the equivalence class, for example
+                    the number of rows with the above set of values.
+            """
+
+            quasi_ids_values = proto.RepeatedField(
+                proto.MESSAGE,
+                number=1,
+                message='Value',
+            )
+            equivalence_class_size = proto.Field(
+                proto.INT64,
+                number=2,
+            )
+
+        class KAnonymityHistogramBucket(proto.Message):
+            r"""Histogram of k-anonymity equivalence classes.
+            Attributes:
+                equivalence_class_size_lower_bound (int):
+                    Lower bound on the size of the equivalence
+                    classes in this bucket.
+                equivalence_class_size_upper_bound (int):
+                    Upper bound on the size of the equivalence
+                    classes in this bucket.
+                bucket_size (int):
+                    Total number of equivalence classes in this
+                    bucket.
+                bucket_values (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass]):
+                    Sample of equivalence classes in this bucket.
+                    The total number of classes returned per bucket
+                    is capped at 20.
+                bucket_value_count (int):
+                    Total number of distinct equivalence classes
+                    in this bucket.
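+
+            Example (editor's sketch; assumes a finished risk analysis
+            job ``job`` fetched with ``get_dlp_job``)::
+
+                result = job.risk_details.k_anonymity_result
+                for bucket in result.equivalence_class_histogram_buckets:
+                    print(bucket.equivalence_class_size_lower_bound,
+                          bucket.equivalence_class_size_upper_bound,
+                          bucket.bucket_size)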
+ """ + + equivalence_class_size_lower_bound = proto.Field( + proto.INT64, + number=1, + ) + equivalence_class_size_upper_bound = proto.Field( + proto.INT64, + number=2, + ) + bucket_size = proto.Field( + proto.INT64, + number=3, + ) + bucket_values = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass', + ) + bucket_value_count = proto.Field( + proto.INT64, + number=5, + ) + + equivalence_class_histogram_buckets = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket', + ) + + class LDiversityResult(proto.Message): + r"""Result of the l-diversity computation. + Attributes: + sensitive_value_frequency_histogram_buckets (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket]): + Histogram of l-diversity equivalence class + sensitive value frequencies. + """ + + class LDiversityEquivalenceClass(proto.Message): + r"""The set of columns' values that share the same ldiversity + value. + + Attributes: + quasi_ids_values (Sequence[google.cloud.dlp_v2.types.Value]): + Quasi-identifier values defining the + k-anonymity equivalence class. The order is + always the same as the original request. + equivalence_class_size (int): + Size of the k-anonymity equivalence class. + num_distinct_sensitive_values (int): + Number of distinct sensitive values in this + equivalence class. + top_sensitive_values (Sequence[google.cloud.dlp_v2.types.ValueFrequency]): + Estimated frequencies of top sensitive + values. + """ + + quasi_ids_values = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + equivalence_class_size = proto.Field( + proto.INT64, + number=2, + ) + num_distinct_sensitive_values = proto.Field( + proto.INT64, + number=3, + ) + top_sensitive_values = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='ValueFrequency', + ) + + class LDiversityHistogramBucket(proto.Message): + r"""Histogram of l-diversity equivalence class sensitive value + frequencies. + + Attributes: + sensitive_value_frequency_lower_bound (int): + Lower bound on the sensitive value + frequencies of the equivalence classes in this + bucket. + sensitive_value_frequency_upper_bound (int): + Upper bound on the sensitive value + frequencies of the equivalence classes in this + bucket. + bucket_size (int): + Total number of equivalence classes in this + bucket. + bucket_values (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass]): + Sample of equivalence classes in this bucket. + The total number of classes returned per bucket + is capped at 20. + bucket_value_count (int): + Total number of distinct equivalence classes + in this bucket. 
+ """ + + sensitive_value_frequency_lower_bound = proto.Field( + proto.INT64, + number=1, + ) + sensitive_value_frequency_upper_bound = proto.Field( + proto.INT64, + number=2, + ) + bucket_size = proto.Field( + proto.INT64, + number=3, + ) + bucket_values = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass', + ) + bucket_value_count = proto.Field( + proto.INT64, + number=5, + ) + + sensitive_value_frequency_histogram_buckets = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket', + ) + + class KMapEstimationResult(proto.Message): + r"""Result of the reidentifiability analysis. Note that these + results are an estimation, not exact values. + + Attributes: + k_map_estimation_histogram (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket]): + The intervals [min_anonymity, max_anonymity] do not overlap. + If a value doesn't correspond to any such interval, the + associated frequency is zero. For example, the following + records: {min_anonymity: 1, max_anonymity: 1, frequency: 17} + {min_anonymity: 2, max_anonymity: 3, frequency: 42} + {min_anonymity: 5, max_anonymity: 10, frequency: 99} mean + that there are no record with an estimated anonymity of 4, + 5, or larger than 10. + """ + + class KMapEstimationQuasiIdValues(proto.Message): + r"""A tuple of values for the quasi-identifier columns. + Attributes: + quasi_ids_values (Sequence[google.cloud.dlp_v2.types.Value]): + The quasi-identifier values. + estimated_anonymity (int): + The estimated anonymity for these quasi- + dentifier values. + """ + + quasi_ids_values = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + estimated_anonymity = proto.Field( + proto.INT64, + number=2, + ) + + class KMapEstimationHistogramBucket(proto.Message): + r"""A KMapEstimationHistogramBucket message with the following values: + min_anonymity: 3 max_anonymity: 5 frequency: 42 means that there are + 42 records whose quasi-identifier values correspond to 3, 4 or 5 + people in the overlying population. An important particular case is + when min_anonymity = max_anonymity = 1: the frequency field then + corresponds to the number of uniquely identifiable records. + + Attributes: + min_anonymity (int): + Always positive. + max_anonymity (int): + Always greater than or equal to min_anonymity. + bucket_size (int): + Number of records within these anonymity + bounds. + bucket_values (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues]): + Sample of quasi-identifier tuple values in + this bucket. The total number of classes + returned per bucket is capped at 20. + bucket_value_count (int): + Total number of distinct quasi-identifier + tuple values in this bucket. 
+ """ + + min_anonymity = proto.Field( + proto.INT64, + number=1, + ) + max_anonymity = proto.Field( + proto.INT64, + number=2, + ) + bucket_size = proto.Field( + proto.INT64, + number=5, + ) + bucket_values = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues', + ) + bucket_value_count = proto.Field( + proto.INT64, + number=7, + ) + + k_map_estimation_histogram = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket', + ) + + class DeltaPresenceEstimationResult(proto.Message): + r"""Result of the δ-presence computation. Note that these results + are an estimation, not exact values. + + Attributes: + delta_presence_estimation_histogram (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket]): + The intervals [min_probability, max_probability) do not + overlap. If a value doesn't correspond to any such interval, + the associated frequency is zero. For example, the following + records: {min_probability: 0, max_probability: 0.1, + frequency: 17} {min_probability: 0.2, max_probability: 0.3, + frequency: 42} {min_probability: 0.3, max_probability: 0.4, + frequency: 99} mean that there are no record with an + estimated probability in [0.1, 0.2) nor larger or equal to + 0.4. + """ + + class DeltaPresenceEstimationQuasiIdValues(proto.Message): + r"""A tuple of values for the quasi-identifier columns. + Attributes: + quasi_ids_values (Sequence[google.cloud.dlp_v2.types.Value]): + The quasi-identifier values. + estimated_probability (float): + The estimated probability that a given individual sharing + these quasi-identifier values is in the dataset. This value, + typically called δ, is the ratio between the number of + records in the dataset with these quasi-identifier values, + and the total number of individuals (inside *and* outside + the dataset) with these quasi-identifier values. For + example, if there are 15 individuals in the dataset who + share the same quasi-identifier values, and an estimated 100 + people in the entire population with these values, then δ is + 0.15. + """ + + quasi_ids_values = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Value', + ) + estimated_probability = proto.Field( + proto.DOUBLE, + number=2, + ) + + class DeltaPresenceEstimationHistogramBucket(proto.Message): + r"""A DeltaPresenceEstimationHistogramBucket message with the following + values: min_probability: 0.1 max_probability: 0.2 frequency: 42 + means that there are 42 records for which δ is in [0.1, 0.2). An + important particular case is when min_probability = max_probability + = 1: then, every individual who shares this quasi-identifier + combination is in the dataset. + + Attributes: + min_probability (float): + Between 0 and 1. + max_probability (float): + Always greater than or equal to min_probability. + bucket_size (int): + Number of records within these probability + bounds. + bucket_values (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues]): + Sample of quasi-identifier tuple values in + this bucket. The total number of classes + returned per bucket is capped at 20. + bucket_value_count (int): + Total number of distinct quasi-identifier + tuple values in this bucket. 
+ """ + + min_probability = proto.Field( + proto.DOUBLE, + number=1, + ) + max_probability = proto.Field( + proto.DOUBLE, + number=2, + ) + bucket_size = proto.Field( + proto.INT64, + number=5, + ) + bucket_values = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues', + ) + bucket_value_count = proto.Field( + proto.INT64, + number=7, + ) + + delta_presence_estimation_histogram = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket', + ) + + class RequestedRiskAnalysisOptions(proto.Message): + r"""Risk analysis options. + Attributes: + job_config (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): + The job config for the risk job. + """ + + job_config = proto.Field( + proto.MESSAGE, + number=1, + message='RiskAnalysisJobConfig', + ) + + requested_privacy_metric = proto.Field( + proto.MESSAGE, + number=1, + message='PrivacyMetric', + ) + requested_source_table = proto.Field( + proto.MESSAGE, + number=2, + message=storage.BigQueryTable, + ) + numerical_stats_result = proto.Field( + proto.MESSAGE, + number=3, + oneof='result', + message=NumericalStatsResult, + ) + categorical_stats_result = proto.Field( + proto.MESSAGE, + number=4, + oneof='result', + message=CategoricalStatsResult, + ) + k_anonymity_result = proto.Field( + proto.MESSAGE, + number=5, + oneof='result', + message=KAnonymityResult, + ) + l_diversity_result = proto.Field( + proto.MESSAGE, + number=6, + oneof='result', + message=LDiversityResult, + ) + k_map_estimation_result = proto.Field( + proto.MESSAGE, + number=7, + oneof='result', + message=KMapEstimationResult, + ) + delta_presence_estimation_result = proto.Field( + proto.MESSAGE, + number=9, + oneof='result', + message=DeltaPresenceEstimationResult, + ) + requested_options = proto.Field( + proto.MESSAGE, + number=10, + message=RequestedRiskAnalysisOptions, + ) + + +class ValueFrequency(proto.Message): + r"""A value of a field, including its frequency. + Attributes: + value (google.cloud.dlp_v2.types.Value): + A value contained in the field in question. + count (int): + How many times the value is contained in the + field. + """ + + value = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + count = proto.Field( + proto.INT64, + number=2, + ) + + +class Value(proto.Message): + r"""Set of primitive values supported by the system. Note that for the + purposes of inspection or transformation, the number of bytes + considered to comprise a 'Value' is based on its representation as a + UTF-8 encoded string. For example, if 'integer_value' is set to + 123456789, the number of bytes would be counted as 9, even though an + int64 only holds up to 8 bytes of data. 
+ + Attributes: + integer_value (int): + integer + float_value (float): + float + string_value (str): + string + boolean_value (bool): + boolean + timestamp_value (google.protobuf.timestamp_pb2.Timestamp): + timestamp + time_value (google.type.timeofday_pb2.TimeOfDay): + time of day + date_value (google.type.date_pb2.Date): + date + day_of_week_value (google.type.dayofweek_pb2.DayOfWeek): + day of week + """ + + integer_value = proto.Field( + proto.INT64, + number=1, + oneof='type', + ) + float_value = proto.Field( + proto.DOUBLE, + number=2, + oneof='type', + ) + string_value = proto.Field( + proto.STRING, + number=3, + oneof='type', + ) + boolean_value = proto.Field( + proto.BOOL, + number=4, + oneof='type', + ) + timestamp_value = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=timestamp_pb2.Timestamp, + ) + time_value = proto.Field( + proto.MESSAGE, + number=6, + oneof='type', + message=timeofday_pb2.TimeOfDay, + ) + date_value = proto.Field( + proto.MESSAGE, + number=7, + oneof='type', + message=date_pb2.Date, + ) + day_of_week_value = proto.Field( + proto.ENUM, + number=8, + oneof='type', + enum=dayofweek_pb2.DayOfWeek, + ) + + +class QuoteInfo(proto.Message): + r"""Message for infoType-dependent details parsed from quote. + Attributes: + date_time (google.cloud.dlp_v2.types.DateTime): + The date time indicated by the quote. + """ + + date_time = proto.Field( + proto.MESSAGE, + number=2, + oneof='parsed_quote', + message='DateTime', + ) + + +class DateTime(proto.Message): + r"""Message for a date time object. + e.g. 2018-01-01, 5th August. + + Attributes: + date (google.type.date_pb2.Date): + One or more of the following must be set. + Must be a valid date or time value. + day_of_week (google.type.dayofweek_pb2.DayOfWeek): + Day of week + time (google.type.timeofday_pb2.TimeOfDay): + Time of day + time_zone (google.cloud.dlp_v2.types.DateTime.TimeZone): + Time zone + """ + + class TimeZone(proto.Message): + r"""Time zone of the date time object. + Attributes: + offset_minutes (int): + Set only if the offset can be determined. + Positive for time ahead of UTC. E.g. For + "UTC-9", this value is -540. + """ + + offset_minutes = proto.Field( + proto.INT32, + number=1, + ) + + date = proto.Field( + proto.MESSAGE, + number=1, + message=date_pb2.Date, + ) + day_of_week = proto.Field( + proto.ENUM, + number=2, + enum=dayofweek_pb2.DayOfWeek, + ) + time = proto.Field( + proto.MESSAGE, + number=3, + message=timeofday_pb2.TimeOfDay, + ) + time_zone = proto.Field( + proto.MESSAGE, + number=4, + message=TimeZone, + ) + + +class DeidentifyConfig(proto.Message): + r"""The configuration that controls how the data will change. + Attributes: + info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): + Treat the dataset as free-form text and apply + the same free text transformation everywhere. + record_transformations (google.cloud.dlp_v2.types.RecordTransformations): + Treat the dataset as structured. + Transformations can be applied to specific + locations within structured datasets, such as + transforming a column within a table. + transformation_error_handling (google.cloud.dlp_v2.types.TransformationErrorHandling): + Mode for handling transformation errors. If left + unspecified, the default mode is + ``TransformationErrorHandling.ThrowError``. 
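+
+    Example (editor's sketch of a minimal free-text config; it replaces
+    every finding with its infoType name)::
+
+        from google.cloud import dlp_v2
+
+        deidentify_config = dlp_v2.DeidentifyConfig(
+            info_type_transformations=dlp_v2.InfoTypeTransformations(
+                transformations=[
+                    dlp_v2.InfoTypeTransformations.InfoTypeTransformation(
+                        primitive_transformation=dlp_v2.PrimitiveTransformation(
+                            replace_with_info_type_config={},
+                        ),
+                    ),
+                ],
+            ),
+        )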
+ """ + + info_type_transformations = proto.Field( + proto.MESSAGE, + number=1, + oneof='transformation', + message='InfoTypeTransformations', + ) + record_transformations = proto.Field( + proto.MESSAGE, + number=2, + oneof='transformation', + message='RecordTransformations', + ) + transformation_error_handling = proto.Field( + proto.MESSAGE, + number=3, + message='TransformationErrorHandling', + ) + + +class TransformationErrorHandling(proto.Message): + r"""How to handle transformation errors during de-identification. A + transformation error occurs when the requested transformation is + incompatible with the data. For example, trying to de-identify an IP + address using a ``DateShift`` transformation would result in a + transformation error, since date info cannot be extracted from an IP + address. Information about any incompatible transformations, and how + they were handled, is returned in the response as part of the + ``TransformationOverviews``. + + Attributes: + throw_error (google.cloud.dlp_v2.types.TransformationErrorHandling.ThrowError): + Throw an error + leave_untransformed (google.cloud.dlp_v2.types.TransformationErrorHandling.LeaveUntransformed): + Ignore errors + """ + + class ThrowError(proto.Message): + r"""Throw an error and fail the request when a transformation + error occurs. + """ + + class LeaveUntransformed(proto.Message): + r"""Skips the data without modifying it if the requested transformation + would cause an error. For example, if a ``DateShift`` transformation + were applied an an IP address, this mode would leave the IP address + unchanged in the response. + """ + + throw_error = proto.Field( + proto.MESSAGE, + number=1, + oneof='mode', + message=ThrowError, + ) + leave_untransformed = proto.Field( + proto.MESSAGE, + number=2, + oneof='mode', + message=LeaveUntransformed, + ) + + +class PrimitiveTransformation(proto.Message): + r"""A rule for transforming a value. 
+ Attributes: + replace_config (google.cloud.dlp_v2.types.ReplaceValueConfig): + Replace + redact_config (google.cloud.dlp_v2.types.RedactConfig): + Redact + character_mask_config (google.cloud.dlp_v2.types.CharacterMaskConfig): + Mask + crypto_replace_ffx_fpe_config (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig): + Ffx-Fpe + fixed_size_bucketing_config (google.cloud.dlp_v2.types.FixedSizeBucketingConfig): + Fixed size bucketing + bucketing_config (google.cloud.dlp_v2.types.BucketingConfig): + Bucketing + replace_with_info_type_config (google.cloud.dlp_v2.types.ReplaceWithInfoTypeConfig): + Replace with infotype + time_part_config (google.cloud.dlp_v2.types.TimePartConfig): + Time extraction + crypto_hash_config (google.cloud.dlp_v2.types.CryptoHashConfig): + Crypto + date_shift_config (google.cloud.dlp_v2.types.DateShiftConfig): + Date Shift + crypto_deterministic_config (google.cloud.dlp_v2.types.CryptoDeterministicConfig): + Deterministic Crypto + """ + + replace_config = proto.Field( + proto.MESSAGE, + number=1, + oneof='transformation', + message='ReplaceValueConfig', + ) + redact_config = proto.Field( + proto.MESSAGE, + number=2, + oneof='transformation', + message='RedactConfig', + ) + character_mask_config = proto.Field( + proto.MESSAGE, + number=3, + oneof='transformation', + message='CharacterMaskConfig', + ) + crypto_replace_ffx_fpe_config = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='CryptoReplaceFfxFpeConfig', + ) + fixed_size_bucketing_config = proto.Field( + proto.MESSAGE, + number=5, + oneof='transformation', + message='FixedSizeBucketingConfig', + ) + bucketing_config = proto.Field( + proto.MESSAGE, + number=6, + oneof='transformation', + message='BucketingConfig', + ) + replace_with_info_type_config = proto.Field( + proto.MESSAGE, + number=7, + oneof='transformation', + message='ReplaceWithInfoTypeConfig', + ) + time_part_config = proto.Field( + proto.MESSAGE, + number=8, + oneof='transformation', + message='TimePartConfig', + ) + crypto_hash_config = proto.Field( + proto.MESSAGE, + number=9, + oneof='transformation', + message='CryptoHashConfig', + ) + date_shift_config = proto.Field( + proto.MESSAGE, + number=11, + oneof='transformation', + message='DateShiftConfig', + ) + crypto_deterministic_config = proto.Field( + proto.MESSAGE, + number=12, + oneof='transformation', + message='CryptoDeterministicConfig', + ) + + +class TimePartConfig(proto.Message): + r"""For use with ``Date``, ``Timestamp``, and ``TimeOfDay``, extract or + preserve a portion of the value. + + Attributes: + part_to_extract (google.cloud.dlp_v2.types.TimePartConfig.TimePart): + The part of the time to keep. + """ + class TimePart(proto.Enum): + r"""Components that make up time.""" + TIME_PART_UNSPECIFIED = 0 + YEAR = 1 + MONTH = 2 + DAY_OF_MONTH = 3 + DAY_OF_WEEK = 4 + WEEK_OF_YEAR = 5 + HOUR_OF_DAY = 6 + + part_to_extract = proto.Field( + proto.ENUM, + number=1, + enum=TimePart, + ) + + +class CryptoHashConfig(proto.Message): + r"""Pseudonymization method that generates surrogates via + cryptographic hashing. Uses SHA-256. + The key size must be either 32 or 64 bytes. + Outputs a base64 encoded representation of the hashed output + (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). + Currently, only string and integer values can be hashed. See + https://cloud.google.com/dlp/docs/pseudonymization to learn + more. + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + The key used by the hash function. 
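+
+    Example (editor's sketch; the key name is hypothetical, and a
+    transient key is discarded when the request finishes)::
+
+        from google.cloud import dlp_v2
+
+        crypto_hash_config = dlp_v2.CryptoHashConfig(
+            crypto_key=dlp_v2.CryptoKey(
+                transient=dlp_v2.TransientCryptoKey(name="example-hash-key"),
+            ),
+        )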
+ """ + + crypto_key = proto.Field( + proto.MESSAGE, + number=1, + message='CryptoKey', + ) + + +class CryptoDeterministicConfig(proto.Message): + r"""Pseudonymization method that generates deterministic + encryption for the given input. Outputs a base64 encoded + representation of the encrypted output. Uses AES-SIV based on + the RFC https://tools.ietf.org/html/rfc5297. + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + The key used by the encryption function. + surrogate_info_type (google.cloud.dlp_v2.types.InfoType): + The custom info type to annotate the surrogate with. This + annotation will be applied to the surrogate by prefixing it + with the name of the custom info type followed by the number + of characters comprising the surrogate. The following scheme + defines the format: {info type name}({surrogate character + count}):{surrogate} + + For example, if the name of custom info type is + 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full + replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' + + This annotation identifies the surrogate when inspecting + content using the custom info type 'Surrogate'. This + facilitates reversal of the surrogate when it occurs in free + text. + + Note: For record transformations where the entire cell in a + table is being transformed, surrogates are not mandatory. + Surrogates are used to denote the location of the token and + are necessary for re-identification in free form text. + + In order for inspection to work properly, the name of this + info type must not occur naturally anywhere in your data; + otherwise, inspection may either + + - reverse a surrogate that does not correspond to an actual + identifier + - be unable to parse the surrogate and result in an error + + Therefore, choose your custom info type name carefully after + considering what your data looks like. One way to select a + name that has a high chance of yielding reliable detection + is to include one or more unicode characters that are highly + improbable to exist in your data. For example, assuming your + data is entered from a regular ASCII keyboard, the symbol + with the hex code point 29DD might be used like so: + ⧝MY_TOKEN_TYPE. + context (google.cloud.dlp_v2.types.FieldId): + A context may be used for higher security and maintaining + referential integrity such that the same identifier in two + different contexts will be given a distinct surrogate. The + context is appended to plaintext value being encrypted. On + decryption the provided context is validated against the + value used during encryption. If a context was provided + during encryption, same context must be provided during + decryption as well. + + If the context is not set, plaintext would be used as is for + encryption. If the context is set but: + + 1. there is no record present when transforming a given + value or + 2. the field is not present when transforming a given value, + + plaintext would be used as is for encryption. + + Note that case (1) is expected when an + ``InfoTypeTransformation`` is applied to both structured and + non-structured ``ContentItem``\ s. + """ + + crypto_key = proto.Field( + proto.MESSAGE, + number=1, + message='CryptoKey', + ) + surrogate_info_type = proto.Field( + proto.MESSAGE, + number=2, + message=storage.InfoType, + ) + context = proto.Field( + proto.MESSAGE, + number=3, + message=storage.FieldId, + ) + + +class ReplaceValueConfig(proto.Message): + r"""Replace each input value with a given ``Value``. 
+    Attributes:
+        new_value (google.cloud.dlp_v2.types.Value):
+            Value to replace it with.
+    """
+
+    new_value = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='Value',
+    )
+
+
+class ReplaceWithInfoTypeConfig(proto.Message):
+    r"""Replace each matching finding with the name of the info_type."""
+
+
+class RedactConfig(proto.Message):
+    r"""Redact a given value. For example, if used with an
+    ``InfoTypeTransformation`` transforming PHONE_NUMBER, and input 'My
+    phone number is 206-555-0123', the output would be 'My phone number
+    is '.
+    """
+
+
+class CharsToIgnore(proto.Message):
+    r"""Characters to skip when doing de-identification of a value.
+    These will be left alone and skipped.
+
+    Attributes:
+        characters_to_skip (str):
+            Characters to not transform when masking.
+        common_characters_to_ignore (google.cloud.dlp_v2.types.CharsToIgnore.CommonCharsToIgnore):
+            Common characters to not transform when
+            masking. Useful to avoid removing punctuation.
+    """
+    class CommonCharsToIgnore(proto.Enum):
+        r"""Convenience enum for indicating common characters to not
+        transform.
+        """
+        COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0
+        NUMERIC = 1
+        ALPHA_UPPER_CASE = 2
+        ALPHA_LOWER_CASE = 3
+        PUNCTUATION = 4
+        WHITESPACE = 5
+
+    characters_to_skip = proto.Field(
+        proto.STRING,
+        number=1,
+        oneof='characters',
+    )
+    common_characters_to_ignore = proto.Field(
+        proto.ENUM,
+        number=2,
+        oneof='characters',
+        enum=CommonCharsToIgnore,
+    )
+
+
+class CharacterMaskConfig(proto.Message):
+    r"""Partially mask a string by replacing a given number of characters
+    with a fixed character. Masking can start from the beginning or end
+    of the string. This can be used on data of any type (numbers, longs,
+    and so on) and when de-identifying structured data we'll attempt to
+    preserve the original data's type. (This allows you to take a long
+    like 123 and modify it to a string like \**3.)
+
+    Attributes:
+        masking_character (str):
+            Character to use to mask the sensitive values—for example,
+            ``*`` for an alphabetic string such as a name, or ``0`` for
+            a numeric string such as ZIP code or credit card number.
+            This string must have a length of 1. If not supplied, this
+            value defaults to ``*`` for strings, and ``0`` for digits.
+        number_to_mask (int):
+            Number of characters to mask. If not set, all
+            matching chars will be masked. Skipped
+            characters do not count towards this tally.
+        reverse_order (bool):
+            Mask characters in reverse order. For example, if
+            ``masking_character`` is ``0``, ``number_to_mask`` is
+            ``14``, and ``reverse_order`` is ``false``, then the input
+            string ``1234-5678-9012-3456`` is masked as
+            ``00000000000000-3456``. If ``masking_character`` is ``*``,
+            ``number_to_mask`` is ``3``, and ``reverse_order`` is
+            ``true``, then the string ``12345`` is masked as ``12***``.
+        characters_to_ignore (Sequence[google.cloud.dlp_v2.types.CharsToIgnore]):
+            When masking a string, items in this list will be skipped
+            when replacing characters. For example, if the input string
+            is ``555-555-5555`` and you instruct Cloud DLP to skip ``-``
+            and mask 5 characters with ``*``, Cloud DLP returns
+            ``***-**5-5555``.
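+
+    Example (editor's sketch; masks the last four characters while
+    skipping punctuation)::
+
+        from google.cloud import dlp_v2
+
+        mask_config = dlp_v2.CharacterMaskConfig(
+            masking_character="*",
+            number_to_mask=4,
+            reverse_order=True,
+            characters_to_ignore=[
+                dlp_v2.CharsToIgnore(
+                    common_characters_to_ignore=(
+                        dlp_v2.CharsToIgnore.CommonCharsToIgnore.PUNCTUATION
+                    ),
+                ),
+            ],
+        )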
+ """ + + masking_character = proto.Field( + proto.STRING, + number=1, + ) + number_to_mask = proto.Field( + proto.INT32, + number=2, + ) + reverse_order = proto.Field( + proto.BOOL, + number=3, + ) + characters_to_ignore = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='CharsToIgnore', + ) + + +class FixedSizeBucketingConfig(proto.Message): + r"""Buckets values based on fixed size ranges. The Bucketing + transformation can provide all of this functionality, but requires + more configuration. This message is provided as a convenience to the + user for simple bucketing strategies. + + The transformed value will be a hyphenated string of + {lower_bound}-{upper_bound}, i.e if lower_bound = 10 and upper_bound + = 20 all values that are within this bucket will be replaced with + "10-20". + + This can be used on data of type: double, long. + + If the bound Value type differs from the type of data being + transformed, we will first attempt converting the type of the data + to be transformed to match the type of the bound before comparing. + + See https://cloud.google.com/dlp/docs/concepts-bucketing to learn + more. + + Attributes: + lower_bound (google.cloud.dlp_v2.types.Value): + Required. Lower bound value of buckets. All values less than + ``lower_bound`` are grouped together into a single bucket; + for example if ``lower_bound`` = 10, then all values less + than 10 are replaced with the value "-10". + upper_bound (google.cloud.dlp_v2.types.Value): + Required. Upper bound value of buckets. All values greater + than upper_bound are grouped together into a single bucket; + for example if ``upper_bound`` = 89, then all values greater + than 89 are replaced with the value "89+". + bucket_size (float): + Required. Size of each bucket (except for minimum and + maximum buckets). So if ``lower_bound`` = 10, + ``upper_bound`` = 89, and ``bucket_size`` = 10, then the + following buckets would be used: -10, 10-20, 20-30, 30-40, + 40-50, 50-60, 60-70, 70-80, 80-89, 89+. Precision up to 2 + decimals works. + """ + + lower_bound = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + upper_bound = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + bucket_size = proto.Field( + proto.DOUBLE, + number=3, + ) + + +class BucketingConfig(proto.Message): + r"""Generalization function that buckets values based on ranges. The + ranges and replacement values are dynamically provided by the user + for custom behavior, such as 1-30 -> LOW 31-65 -> MEDIUM 66-100 -> + HIGH This can be used on data of type: number, long, string, + timestamp. If the bound ``Value`` type differs from the type of data + being transformed, we will first attempt converting the type of the + data to be transformed to match the type of the bound before + comparing. See https://cloud.google.com/dlp/docs/concepts-bucketing + to learn more. + + Attributes: + buckets (Sequence[google.cloud.dlp_v2.types.BucketingConfig.Bucket]): + Set of buckets. Ranges must be non- + verlapping. + """ + + class Bucket(proto.Message): + r"""Bucket is represented as a range, along with replacement + values. + + Attributes: + min_ (google.cloud.dlp_v2.types.Value): + Lower bound of the range, inclusive. Type + should be the same as max if used. + max_ (google.cloud.dlp_v2.types.Value): + Upper bound of the range, exclusive; type + must match min. + replacement_value (google.cloud.dlp_v2.types.Value): + Required. Replacement value for this bucket. 
+ """ + + min_ = proto.Field( + proto.MESSAGE, + number=1, + message='Value', + ) + max_ = proto.Field( + proto.MESSAGE, + number=2, + message='Value', + ) + replacement_value = proto.Field( + proto.MESSAGE, + number=3, + message='Value', + ) + + buckets = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Bucket, + ) + + +class CryptoReplaceFfxFpeConfig(proto.Message): + r"""Replaces an identifier with a surrogate using Format Preserving + Encryption (FPE) with the FFX mode of operation; however when used + in the ``ReidentifyContent`` API method, it serves the opposite + function by reversing the surrogate back into the original + identifier. The identifier must be encoded as ASCII. For a given + crypto key and context, the same identifier will be replaced with + the same surrogate. Identifiers must be at least two characters + long. In the case that the identifier is the empty string, it will + be skipped. See https://cloud.google.com/dlp/docs/pseudonymization + to learn more. + + Note: We recommend using CryptoDeterministicConfig for all use cases + which do not require preserving the input alphabet space and size, + plus warrant referential integrity. + + Attributes: + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + Required. The key used by the encryption + algorithm. + context (google.cloud.dlp_v2.types.FieldId): + The 'tweak', a context may be used for higher security since + the same identifier in two different contexts won't be given + the same surrogate. If the context is not set, a default + tweak will be used. + + If the context is set but: + + 1. there is no record present when transforming a given + value or + 2. the field is not present when transforming a given value, + + a default tweak will be used. + + Note that case (1) is expected when an + ``InfoTypeTransformation`` is applied to both structured and + non-structured ``ContentItem``\ s. Currently, the referenced + field may be of value type integer or string. + + The tweak is constructed as a sequence of bytes in big + endian byte order such that: + + - a 64 bit integer is encoded followed by a single byte of + value 1 + - a string is encoded in UTF-8 format followed by a single + byte of value 2 + common_alphabet (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet): + Common alphabets. + custom_alphabet (str): + This is supported by mapping these to the alphanumeric + characters that the FFX mode natively supports. This happens + before/after encryption/decryption. Each character listed + must appear only once. Number of characters must be in the + range [2, 95]. This must be encoded as ASCII. The order of + characters does not matter. The full list of allowed + characters is: + 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz + ~`!@#$%^&*()_-+={[}]|:;"'<,>.?/ + radix (int): + The native way to select the alphabet. Must be in the range + [2, 95]. + surrogate_info_type (google.cloud.dlp_v2.types.InfoType): + The custom infoType to annotate the surrogate with. This + annotation will be applied to the surrogate by prefixing it + with the name of the custom infoType followed by the number + of characters comprising the surrogate. 
+            The following scheme defines the format:
+            info_type_name(surrogate_character_count):surrogate
+
+            For example, if the name of custom infoType is
+            'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full
+            replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc'
+
+            This annotation identifies the surrogate when inspecting
+            content using the custom infoType ``SurrogateType``.
+            This facilitates reversal of the surrogate when it occurs in
+            free text.
+
+            In order for inspection to work properly, the name of this
+            infoType must not occur naturally anywhere in your data;
+            otherwise, inspection may find a surrogate that does not
+            correspond to an actual identifier. Therefore, choose your
+            custom infoType name carefully after considering what your
+            data looks like. One way to select a name that has a high
+            chance of yielding reliable detection is to include one or
+            more unicode characters that are highly improbable to exist
+            in your data. For example, assuming your data is entered
+            from a regular ASCII keyboard, the symbol with the hex code
+            point 29DD might be used like so: ⧝MY_TOKEN_TYPE
+    """
+    class FfxCommonNativeAlphabet(proto.Enum):
+        r"""These are commonly used subsets of the alphabet that the FFX
+        mode natively supports. In the algorithm, the alphabet is
+        selected using the "radix". Therefore each corresponds to a
+        particular radix.
+        """
+        FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0
+        NUMERIC = 1
+        HEXADECIMAL = 2
+        UPPER_CASE_ALPHA_NUMERIC = 3
+        ALPHA_NUMERIC = 4
+
+    crypto_key = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='CryptoKey',
+    )
+    context = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=storage.FieldId,
+    )
+    common_alphabet = proto.Field(
+        proto.ENUM,
+        number=4,
+        oneof='alphabet',
+        enum=FfxCommonNativeAlphabet,
+    )
+    custom_alphabet = proto.Field(
+        proto.STRING,
+        number=5,
+        oneof='alphabet',
+    )
+    radix = proto.Field(
+        proto.INT32,
+        number=6,
+        oneof='alphabet',
+    )
+    surrogate_info_type = proto.Field(
+        proto.MESSAGE,
+        number=8,
+        message=storage.InfoType,
+    )
+
+
+class CryptoKey(proto.Message):
+    r"""This is a data encryption key (DEK) (as opposed to
+    a key encryption key (KEK) stored by KMS).
+    When using KMS to wrap/unwrap DEKs, be sure to set an
+    appropriate IAM policy on the KMS CryptoKey (KEK) to ensure an
+    attacker cannot unwrap the data crypto key.
+
+    Attributes:
+        transient (google.cloud.dlp_v2.types.TransientCryptoKey):
+            Transient crypto key
+        unwrapped (google.cloud.dlp_v2.types.UnwrappedCryptoKey):
+            Unwrapped crypto key
+        kms_wrapped (google.cloud.dlp_v2.types.KmsWrappedCryptoKey):
+            KMS wrapped key
+    """
+
+    transient = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        oneof='source',
+        message='TransientCryptoKey',
+    )
+    unwrapped = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='source',
+        message='UnwrappedCryptoKey',
+    )
+    kms_wrapped = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof='source',
+        message='KmsWrappedCryptoKey',
+    )
+
+
+class TransientCryptoKey(proto.Message):
+    r"""Use this to have a random data crypto key generated.
+    It will be discarded after the request finishes.
+
+    Attributes:
+        name (str):
+            Required. Name of the key. This is an arbitrary string used
+            to differentiate different keys. A unique key is generated
+            per name: two separate ``TransientCryptoKey`` protos share
+            the same generated key if their names are the same. When the
+            data crypto key is generated, this name is not used in any
+            way (repeating the API call will result in a different key
+            being generated).
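+
+    Example (editor's sketch; both keys below resolve to the same
+    generated key within a request because the names match)::
+
+        from google.cloud import dlp_v2
+
+        key_a = dlp_v2.CryptoKey(
+            transient=dlp_v2.TransientCryptoKey(name="shared-key"),
+        )
+        key_b = dlp_v2.CryptoKey(
+            transient=dlp_v2.TransientCryptoKey(name="shared-key"),
+        )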
+ """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class UnwrappedCryptoKey(proto.Message): + r"""Using raw keys is prone to security risks due to accidentally + leaking the key. Choose another type of key if possible. + + Attributes: + key (bytes): + Required. A 128/192/256 bit key. + """ + + key = proto.Field( + proto.BYTES, + number=1, + ) + + +class KmsWrappedCryptoKey(proto.Message): + r"""Include to use an existing data crypto key wrapped by KMS. + The wrapped key must be a 128/192/256 bit key. + Authorization requires the following IAM permissions when + sending a request to perform a crypto transformation using a + kms-wrapped crypto key: dlp.kms.encrypt + + Attributes: + wrapped_key (bytes): + Required. The wrapped data crypto key. + crypto_key_name (str): + Required. The resource name of the KMS + CryptoKey to use for unwrapping. + """ + + wrapped_key = proto.Field( + proto.BYTES, + number=1, + ) + crypto_key_name = proto.Field( + proto.STRING, + number=2, + ) + + +class DateShiftConfig(proto.Message): + r"""Shifts dates by random number of days, with option to be + consistent for the same context. See + https://cloud.google.com/dlp/docs/concepts-date-shifting to + learn more. + + Attributes: + upper_bound_days (int): + Required. Range of shift in days. Actual + shift will be selected at random within this + range (inclusive ends). Negative means shift to + earlier in time. Must not be more than 365250 + days (1000 years) each direction. + For example, 3 means shift date to at most 3 + days into the future. + lower_bound_days (int): + Required. For example, -5 means shift date to + at most 5 days back in the past. + context (google.cloud.dlp_v2.types.FieldId): + Points to the field that contains the + context, for example, an entity id. If set, must + also set cryptoKey. If set, shift will be + consistent for the given context. + crypto_key (google.cloud.dlp_v2.types.CryptoKey): + Causes the shift to be computed based on this key and the + context. This results in the same shift for the same context + and crypto_key. If set, must also set context. Can only be + applied to table items. + """ + + upper_bound_days = proto.Field( + proto.INT32, + number=1, + ) + lower_bound_days = proto.Field( + proto.INT32, + number=2, + ) + context = proto.Field( + proto.MESSAGE, + number=3, + message=storage.FieldId, + ) + crypto_key = proto.Field( + proto.MESSAGE, + number=4, + oneof='method', + message='CryptoKey', + ) + + +class InfoTypeTransformations(proto.Message): + r"""A type of transformation that will scan unstructured text and apply + various ``PrimitiveTransformation``\ s to each finding, where the + transformation is applied to only values that were identified as a + specific info_type. + + Attributes: + transformations (Sequence[google.cloud.dlp_v2.types.InfoTypeTransformations.InfoTypeTransformation]): + Required. Transformation for each infoType. + Cannot specify more than one for a given + infoType. + """ + + class InfoTypeTransformation(proto.Message): + r"""A transformation to apply to text that is identified as a specific + info_type. + + Attributes: + info_types (Sequence[google.cloud.dlp_v2.types.InfoType]): + InfoTypes to apply the transformation to. An empty list will + cause this transformation to apply to all findings that + correspond to infoTypes that were requested in + ``InspectConfig``. + primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + Required. Primitive transformation to apply + to the infoType. 
+ """ + + info_types = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.InfoType, + ) + primitive_transformation = proto.Field( + proto.MESSAGE, + number=2, + message='PrimitiveTransformation', + ) + + transformations = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=InfoTypeTransformation, + ) + + +class FieldTransformation(proto.Message): + r"""The transformation to apply to the field. + Attributes: + fields (Sequence[google.cloud.dlp_v2.types.FieldId]): + Required. Input field(s) to apply the + transformation to. + condition (google.cloud.dlp_v2.types.RecordCondition): + Only apply the transformation if the condition evaluates to + true for the given ``RecordCondition``. The conditions are + allowed to reference fields that are not used in the actual + transformation. + + Example Use Cases: + + - Apply a different bucket transformation to an age column + if the zip code column for the same record is within a + specific range. + - Redact a field if the date of birth field is greater than + 85. + primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): + Apply the transformation to the entire field. + info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): + Treat the contents of the field as free text, and + selectively transform content that matches an ``InfoType``. + """ + + fields = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=storage.FieldId, + ) + condition = proto.Field( + proto.MESSAGE, + number=3, + message='RecordCondition', + ) + primitive_transformation = proto.Field( + proto.MESSAGE, + number=4, + oneof='transformation', + message='PrimitiveTransformation', + ) + info_type_transformations = proto.Field( + proto.MESSAGE, + number=5, + oneof='transformation', + message='InfoTypeTransformations', + ) + + +class RecordTransformations(proto.Message): + r"""A type of transformation that is applied over structured data + such as a table. + + Attributes: + field_transformations (Sequence[google.cloud.dlp_v2.types.FieldTransformation]): + Transform the record by applying various + field transformations. + record_suppressions (Sequence[google.cloud.dlp_v2.types.RecordSuppression]): + Configuration defining which records get + suppressed entirely. Records that match any + suppression rule are omitted from the output. + """ + + field_transformations = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='FieldTransformation', + ) + record_suppressions = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='RecordSuppression', + ) + + +class RecordSuppression(proto.Message): + r"""Configuration to suppress records whose suppression + conditions evaluate to true. + + Attributes: + condition (google.cloud.dlp_v2.types.RecordCondition): + A condition that when it evaluates to true + will result in the record being evaluated to be + suppressed from the transformed content. + """ + + condition = proto.Field( + proto.MESSAGE, + number=1, + message='RecordCondition', + ) + + +class RecordCondition(proto.Message): + r"""A condition for determining whether a transformation should + be applied to a field. + + Attributes: + expressions (google.cloud.dlp_v2.types.RecordCondition.Expressions): + An expression. + """ + + class Condition(proto.Message): + r"""The field type of ``value`` and ``field`` do not need to match to be + considered equal, but not all comparisons are possible. 
EQUAL_TO and
+ NOT_EQUAL_TO attempt to compare even with incompatible types, but
+ all other comparisons are invalid with incompatible types. A
+ ``value`` of type:
+
+ - ``string`` can be compared against all other types
+ - ``boolean`` can only be compared against other booleans
+ - ``integer`` can be compared against doubles or a string if the
+ string value can be parsed as an integer.
+ - ``double`` can be compared against integers or a string if the
+ string can be parsed as a double.
+ - ``Timestamp`` can be compared against strings in RFC 3339 date
+ string format.
+ - ``TimeOfDay`` can be compared against timestamps and strings in
+ the format of 'HH:mm:ss'.
+
+ If we fail to compare due to type mismatch, a warning will be given
+ and the condition will evaluate to false.
+
+ Attributes:
+ field (google.cloud.dlp_v2.types.FieldId):
+ Required. Field within the record this
+ condition is evaluated against.
+ operator (google.cloud.dlp_v2.types.RelationalOperator):
+ Required. Operator used to compare the field
+ or infoType to the value.
+ value (google.cloud.dlp_v2.types.Value):
+ Value to compare against. [Mandatory, except for ``EXISTS``
+ tests.]
+ """
+
+ field = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message=storage.FieldId,
+ )
+ operator = proto.Field(
+ proto.ENUM,
+ number=3,
+ enum='RelationalOperator',
+ )
+ value = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ message='Value',
+ )
+
+ class Conditions(proto.Message):
+ r"""A collection of conditions.
+ Attributes:
+ conditions (Sequence[google.cloud.dlp_v2.types.RecordCondition.Condition]):
+ A collection of conditions.
+ """
+
+ conditions = proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message='RecordCondition.Condition',
+ )
+
+ class Expressions(proto.Message):
+ r"""An expression, consisting of an operator and conditions.
+ Attributes:
+ logical_operator (google.cloud.dlp_v2.types.RecordCondition.Expressions.LogicalOperator):
+ The operator to apply to the result of conditions. Default
+ and currently only supported value is ``AND``.
+ conditions (google.cloud.dlp_v2.types.RecordCondition.Conditions):
+ Conditions to apply to the expression.
+ """
+ class LogicalOperator(proto.Enum):
+ r"""Logical operators for conditional checks."""
+ LOGICAL_OPERATOR_UNSPECIFIED = 0
+ AND = 1
+
+ logical_operator = proto.Field(
+ proto.ENUM,
+ number=1,
+ enum='RecordCondition.Expressions.LogicalOperator',
+ )
+ conditions = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ oneof='type',
+ message='RecordCondition.Conditions',
+ )
+
+ expressions = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ message=Expressions,
+ )
+
+
+class TransformationOverview(proto.Message):
+ r"""Overview of the modifications that occurred.
+ Attributes:
+ transformed_bytes (int):
+ Total size in bytes that were transformed in
+ some way.
+ transformation_summaries (Sequence[google.cloud.dlp_v2.types.TransformationSummary]):
+ Transformations applied to the dataset.
+ """
+
+ transformed_bytes = proto.Field(
+ proto.INT64,
+ number=2,
+ )
+ transformation_summaries = proto.RepeatedField(
+ proto.MESSAGE,
+ number=3,
+ message='TransformationSummary',
+ )
+
+
+class TransformationSummary(proto.Message):
+ r"""Summary of a single transformation. Only one of 'transformation',
+ 'field_transformation', or 'record_suppress' will be set.
+
+ Attributes:
+ info_type (google.cloud.dlp_v2.types.InfoType):
+ Set if the transformation was limited to a
+ specific InfoType.
+ field (google.cloud.dlp_v2.types.FieldId):
+ Set if the transformation was limited to a
+ specific FieldId.
+ transformation (google.cloud.dlp_v2.types.PrimitiveTransformation):
+ The specific transformation these stats apply
+ to.
+ field_transformations (Sequence[google.cloud.dlp_v2.types.FieldTransformation]):
+ The field transformation that was applied.
+ If multiple field transformations are requested
+ for a single field, this list will contain all
+ of them; otherwise, only one is supplied.
+ record_suppress (google.cloud.dlp_v2.types.RecordSuppression):
+ The specific suppression option these stats
+ apply to.
+ results (Sequence[google.cloud.dlp_v2.types.TransformationSummary.SummaryResult]):
+ Collection of all transformations that took
+ place or had an error.
+ transformed_bytes (int):
+ Total size in bytes that were transformed in
+ some way.
+ """
+ class TransformationResultCode(proto.Enum):
+ r"""Possible outcomes of transformations."""
+ TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0
+ SUCCESS = 1
+ ERROR = 2
+
+ class SummaryResult(proto.Message):
+ r"""A collection that informs the user the number of times a particular
+ ``TransformationResultCode`` and error details occurred.
+
+ Attributes:
+ count (int):
+ Number of transformations counted by this
+ result.
+ code (google.cloud.dlp_v2.types.TransformationSummary.TransformationResultCode):
+ Outcome of the transformation.
+ details (str):
+ A place for warnings or errors to show up if
+ a transformation didn't work as expected.
+ """
+
+ count = proto.Field(
+ proto.INT64,
+ number=1,
+ )
+ code = proto.Field(
+ proto.ENUM,
+ number=2,
+ enum='TransformationSummary.TransformationResultCode',
+ )
+ details = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+
+ info_type = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message=storage.InfoType,
+ )
+ field = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ message=storage.FieldId,
+ )
+ transformation = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ message='PrimitiveTransformation',
+ )
+ field_transformations = proto.RepeatedField(
+ proto.MESSAGE,
+ number=5,
+ message='FieldTransformation',
+ )
+ record_suppress = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ message='RecordSuppression',
+ )
+ results = proto.RepeatedField(
+ proto.MESSAGE,
+ number=4,
+ message=SummaryResult,
+ )
+ transformed_bytes = proto.Field(
+ proto.INT64,
+ number=7,
+ )
+
+
+class Schedule(proto.Message):
+ r"""Schedule for triggeredJobs.
+ Attributes:
+ recurrence_period_duration (google.protobuf.duration_pb2.Duration):
+ With this option a job is started on a
+ regular periodic basis. For example: every day
+ (86400 seconds).
+ A scheduled start time will be skipped if the
+ previous execution has not ended when its
+ scheduled time occurs.
+ This value must be set to a time duration
+ greater than or equal to 1 day and can be no
+ longer than 60 days.
+ """
+
+ recurrence_period_duration = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ oneof='option',
+ message=duration_pb2.Duration,
+ )
+
+
+class Manual(proto.Message):
+ r"""Job trigger option for hybrid jobs. Jobs must be manually
+ created and finished.
+ """
+
+
+class InspectTemplate(proto.Message):
+ r"""The inspectTemplate contains a configuration (set of types of
+ sensitive data to be detected) to be used anywhere you otherwise
+ would normally specify InspectConfig. See
+ https://cloud.google.com/dlp/docs/concepts-templates to learn
+ more.
+
+ Attributes:
+ name (str):
+ Output only. The template name.
+
+ The template will have one of the following formats:
+ ``projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID`` OR
+ ``organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID``;
+ display_name (str):
+ Display name (max 256 chars).
+ description (str):
+ Short description (max 256 chars).
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The creation timestamp of an
+ inspectTemplate.
+ update_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The last update timestamp of an
+ inspectTemplate.
+ inspect_config (google.cloud.dlp_v2.types.InspectConfig):
+ The core content of the template.
+ Configuration of the scanning process.
+ """
+
+ name = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ display_name = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ description = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ create_time = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ message=timestamp_pb2.Timestamp,
+ )
+ update_time = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ message=timestamp_pb2.Timestamp,
+ )
+ inspect_config = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ message='InspectConfig',
+ )
+
+
+class DeidentifyTemplate(proto.Message):
+ r"""DeidentifyTemplates contains instructions on how to
+ de-identify content. See
+ https://cloud.google.com/dlp/docs/concepts-templates to learn
+ more.
+
+ Attributes:
+ name (str):
+ Output only. The template name.
+
+ The template will have one of the following formats:
+ ``projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID`` OR
+ ``organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID``
+ display_name (str):
+ Display name (max 256 chars).
+ description (str):
+ Short description (max 256 chars).
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The creation timestamp of a
+ deidentifyTemplate.
+ update_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The last update timestamp of a
+ deidentifyTemplate.
+ deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig):
+ The core content of the template.
+ """
+
+ name = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ display_name = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ description = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ create_time = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ message=timestamp_pb2.Timestamp,
+ )
+ update_time = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ message=timestamp_pb2.Timestamp,
+ )
+ deidentify_config = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ message='DeidentifyConfig',
+ )
+
+
+class Error(proto.Message):
+ r"""Detailed information about an error encountered during job
+ execution or the results of an unsuccessful activation of the
+ JobTrigger.
+
+ Attributes:
+ details (google.rpc.status_pb2.Status):
+ Detailed error codes and messages.
+ timestamps (Sequence[google.protobuf.timestamp_pb2.Timestamp]):
+ The times the error occurred.
+ """
+
+ details = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message=status_pb2.Status,
+ )
+ timestamps = proto.RepeatedField(
+ proto.MESSAGE,
+ number=2,
+ message=timestamp_pb2.Timestamp,
+ )
+
+
+class JobTrigger(proto.Message):
+ r"""Contains a configuration to make DLP API calls on a repeating
+ basis. See https://cloud.google.com/dlp/docs/concepts-job-triggers
+ to learn more.
+
+ Attributes:
+ name (str):
+ Unique resource name for the triggeredJob, assigned by the
+ service when the triggeredJob is created, for example
+ ``projects/dlp-test-project/jobTriggers/53234423``.
+ display_name (str):
+ Display name (max 100 chars)
+ description (str):
+ User provided description (max 256 chars)
+ inspect_job (google.cloud.dlp_v2.types.InspectJobConfig):
+ For inspect jobs, a snapshot of the
+ configuration.
+ triggers (Sequence[google.cloud.dlp_v2.types.JobTrigger.Trigger]):
+ A list of triggers which will be OR'ed
+ together. Only one in the list needs to trigger
+ for a job to be started. The list may contain
+ only a single Schedule trigger and must have at
+ least one object.
+ errors (Sequence[google.cloud.dlp_v2.types.Error]):
+ Output only. A stream of errors encountered
+ when the trigger was activated. Repeated errors
+ may result in the JobTrigger automatically being
+ paused. Will return the last 100 errors.
+ Whenever the JobTrigger is modified this list
+ will be cleared.
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The creation timestamp of a
+ triggeredJob.
+ update_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The last update timestamp of a
+ triggeredJob.
+ last_run_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The timestamp of the last time
+ this trigger executed.
+ status (google.cloud.dlp_v2.types.JobTrigger.Status):
+ Required. A status for this trigger.
+ """
+ class Status(proto.Enum):
+ r"""Whether the trigger is currently active. If PAUSED or
+ CANCELLED, no jobs will be created with this configuration. The
+ service may automatically pause triggers experiencing frequent
+ errors. To restart a job, set the status to HEALTHY after
+ correcting user errors.
+ """
+ STATUS_UNSPECIFIED = 0
+ HEALTHY = 1
+ PAUSED = 2
+ CANCELLED = 3
+
+ class Trigger(proto.Message):
+ r"""What event needs to occur for a new job to be started.
+ Attributes:
+ schedule (google.cloud.dlp_v2.types.Schedule):
+ Create a job on a repeating basis based on
+ the elapse of time.
+ manual (google.cloud.dlp_v2.types.Manual):
+ For use with hybrid jobs. Jobs must be
+ manually created and finished. Early access
+ feature is in a pre-release state and might
+ change or have limited support. For more
+ information, see
+ https://cloud.google.com/products#product-launch-stages.
+ """
+
+ schedule = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ oneof='trigger',
+ message='Schedule',
+ )
+ manual = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ oneof='trigger',
+ message='Manual',
+ )
+
+ name = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ display_name = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+ description = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ inspect_job = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ oneof='job',
+ message='InspectJobConfig',
+ )
+ triggers = proto.RepeatedField(
+ proto.MESSAGE,
+ number=5,
+ message=Trigger,
+ )
+ errors = proto.RepeatedField(
+ proto.MESSAGE,
+ number=6,
+ message='Error',
+ )
+ create_time = proto.Field(
+ proto.MESSAGE,
+ number=7,
+ message=timestamp_pb2.Timestamp,
+ )
+ update_time = proto.Field(
+ proto.MESSAGE,
+ number=8,
+ message=timestamp_pb2.Timestamp,
+ )
+ last_run_time = proto.Field(
+ proto.MESSAGE,
+ number=9,
+ message=timestamp_pb2.Timestamp,
+ )
+ status = proto.Field(
+ proto.ENUM,
+ number=10,
+ enum=Status,
+ )
+
+
+class Action(proto.Message):
+ r"""A task to execute on the completion of a job.
+ See https://cloud.google.com/dlp/docs/concepts-actions to learn
+ more.
+
+ Attributes:
+ save_findings (google.cloud.dlp_v2.types.Action.SaveFindings):
+ Save resulting findings in a provided
+ location.
+ pub_sub (google.cloud.dlp_v2.types.Action.PublishToPubSub):
+ Publish a notification to a pubsub topic.
+ publish_summary_to_cscc (google.cloud.dlp_v2.types.Action.PublishSummaryToCscc):
+ Publish summary to Cloud Security Command
+ Center (Alpha).
+ publish_findings_to_cloud_data_catalog (google.cloud.dlp_v2.types.Action.PublishFindingsToCloudDataCatalog):
+ Publish findings to Cloud Data Catalog.
+ job_notification_emails (google.cloud.dlp_v2.types.Action.JobNotificationEmails):
+ Enable email notification for project owners
+ and editors on job's completion/failure.
+ publish_to_stackdriver (google.cloud.dlp_v2.types.Action.PublishToStackdriver):
+ Enable Stackdriver metric dlp.googleapis.com/finding_count.
+ """
+
+ class SaveFindings(proto.Message):
+ r"""If set, the detailed findings will be persisted to the
+ specified OutputStorageConfig. Only a single instance of this
+ action can be specified.
+ Compatible with: Inspect, Risk
+
+ Attributes:
+ output_config (google.cloud.dlp_v2.types.OutputStorageConfig):
+ Location to store findings outside of DLP.
+ """
+
+ output_config = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ message='OutputStorageConfig',
+ )
+
+ class PublishToPubSub(proto.Message):
+ r"""Publish a message into given Pub/Sub topic when DlpJob has
+ completed. The message contains a single field, ``DlpJobName``,
+ which is equal to the finished job's
+ ```DlpJob.name`` `__.
+ Compatible with: Inspect, Risk
+
+ Attributes:
+ topic (str):
+ Cloud Pub/Sub topic to send notifications to.
+ The topic must have given publishing access
+ rights to the DLP API service account executing
+ the long running DlpJob sending the
+ notifications. Format is
+ projects/{project}/topics/{topic}.
+ """
+
+ topic = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+ class PublishSummaryToCscc(proto.Message):
+ r"""Publish the result summary of a DlpJob to the Cloud Security
+ Command Center (CSCC Alpha).
+ This action is only available for projects which are part of an
+ organization and whitelisted for the alpha Cloud Security
+ Command Center.
+ The action will publish the count of finding instances and their
+ info types. The summary of findings will be persisted in CSCC
+ and are governed by CSCC service-specific policy, see
+ https://cloud.google.com/terms/service-terms Only a single
+ instance of this action can be specified. Compatible with:
+ Inspect
+ """
+
+ class PublishFindingsToCloudDataCatalog(proto.Message):
+ r"""Publish findings of a DlpJob to Cloud Data Catalog. Labels
+ summarizing the results of the DlpJob will be applied to the
+ entry for the resource scanned in Cloud Data Catalog. Any labels
+ previously written by another DlpJob will be deleted. InfoType
+ naming patterns are strictly enforced when using this feature.
+ Note that the findings will be persisted in Cloud Data Catalog
+ storage and are governed by Data Catalog service-specific
+ policy, see https://cloud.google.com/terms/service-terms
+ Only a single instance of this action can be specified and only
+ allowed if all resources being scanned are BigQuery tables.
+ Compatible with: Inspect
+ """
+
+ class JobNotificationEmails(proto.Message):
+ r"""Enable email notification to project owners and editors on
+ job's completion/failure.
+ """
+
+ class PublishToStackdriver(proto.Message):
+ r"""Enable Stackdriver metric dlp.googleapis.com/finding_count. This
+ will publish a metric to Stackdriver on each infoType requested and
+ how many findings were found for it.
CustomDetectors will be + bucketed as 'Custom' under the Stackdriver label 'info_type'. + """ + + save_findings = proto.Field( + proto.MESSAGE, + number=1, + oneof='action', + message=SaveFindings, + ) + pub_sub = proto.Field( + proto.MESSAGE, + number=2, + oneof='action', + message=PublishToPubSub, + ) + publish_summary_to_cscc = proto.Field( + proto.MESSAGE, + number=3, + oneof='action', + message=PublishSummaryToCscc, + ) + publish_findings_to_cloud_data_catalog = proto.Field( + proto.MESSAGE, + number=5, + oneof='action', + message=PublishFindingsToCloudDataCatalog, + ) + job_notification_emails = proto.Field( + proto.MESSAGE, + number=8, + oneof='action', + message=JobNotificationEmails, + ) + publish_to_stackdriver = proto.Field( + proto.MESSAGE, + number=9, + oneof='action', + message=PublishToStackdriver, + ) + + +class CreateInspectTemplateRequest(proto.Message): + r"""Request message for CreateInspectTemplate. + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + Required. The InspectTemplate to create. + template_id (str): + The template id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + inspect_template = proto.Field( + proto.MESSAGE, + number=2, + message='InspectTemplate', + ) + template_id = proto.Field( + proto.STRING, + number=3, + ) + location_id = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateInspectTemplateRequest(proto.Message): + r"""Request message for UpdateInspectTemplate. + Attributes: + name (str): + Required. Resource name of organization and inspectTemplate + to be updated, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + inspect_template (google.cloud.dlp_v2.types.InspectTemplate): + New InspectTemplate value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + inspect_template = proto.Field( + proto.MESSAGE, + number=2, + message='InspectTemplate', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetInspectTemplateRequest(proto.Message): + r"""Request message for GetInspectTemplate. + Attributes: + name (str): + Required. 
Resource name of the organization and + inspectTemplate to be read, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListInspectTemplatesRequest(proto.Message): + r"""Request message for ListInspectTemplates. + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListInspectTemplates``. + page_size (int): + Size of the page, can be limited by server. + If zero server returns a page of max size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to time the template was + created. + - ``update_time``: corresponds to time the template was + last updated. + - ``name``: corresponds to template's name. + - ``display_name``: corresponds to template's display name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + order_by = proto.Field( + proto.STRING, + number=4, + ) + location_id = proto.Field( + proto.STRING, + number=5, + ) + + +class ListInspectTemplatesResponse(proto.Message): + r"""Response message for ListInspectTemplates. + Attributes: + inspect_templates (Sequence[google.cloud.dlp_v2.types.InspectTemplate]): + List of inspectTemplates, up to page_size in + ListInspectTemplatesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListInspectTemplates request. + """ + + @property + def raw_page(self): + return self + + inspect_templates = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='InspectTemplate', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteInspectTemplateRequest(proto.Message): + r"""Request message for DeleteInspectTemplate. + Attributes: + name (str): + Required. Resource name of the organization and + inspectTemplate to be deleted, for example + ``organizations/433245324/inspectTemplates/432452342`` or + projects/project-id/inspectTemplates/432452342. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateJobTriggerRequest(proto.Message): + r"""Request message for CreateJobTrigger. + Attributes: + parent (str): + Required. 
Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + Required. The JobTrigger to create. + trigger_id (str): + The trigger id can contain uppercase and lowercase letters, + numbers, and hyphens; that is, it must match the regular + expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 + characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + job_trigger = proto.Field( + proto.MESSAGE, + number=2, + message='JobTrigger', + ) + trigger_id = proto.Field( + proto.STRING, + number=3, + ) + location_id = proto.Field( + proto.STRING, + number=4, + ) + + +class ActivateJobTriggerRequest(proto.Message): + r"""Request message for ActivateJobTrigger. + Attributes: + name (str): + Required. Resource name of the trigger to activate, for + example ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateJobTriggerRequest(proto.Message): + r"""Request message for UpdateJobTrigger. + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + job_trigger (google.cloud.dlp_v2.types.JobTrigger): + New JobTrigger value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + job_trigger = proto.Field( + proto.MESSAGE, + number=2, + message='JobTrigger', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetJobTriggerRequest(proto.Message): + r"""Request message for GetJobTrigger. + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class CreateDlpJobRequest(proto.Message): + r"""Request message for CreateDlpJobRequest. Used to initiate + long running jobs such as calculating risk metrics or inspecting + Google Cloud Storage. + + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on whether you + have `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): + Set to control what and how to inspect. 
+ risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig):
+ Set to choose what metric to calculate.
+ job_id (str):
+ The job id can contain uppercase and lowercase letters,
+ numbers, and hyphens; that is, it must match the regular
+ expression: ``[a-zA-Z\d-_]+``. The maximum length is 100
+ characters. Can be empty to allow the system to generate
+ one.
+ location_id (str):
+ Deprecated. This field has no effect.
+ """
+
+ parent = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ inspect_job = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ oneof='job',
+ message='InspectJobConfig',
+ )
+ risk_job = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ oneof='job',
+ message='RiskAnalysisJobConfig',
+ )
+ job_id = proto.Field(
+ proto.STRING,
+ number=4,
+ )
+ location_id = proto.Field(
+ proto.STRING,
+ number=5,
+ )
+
+
+class ListJobTriggersRequest(proto.Message):
+ r"""Request message for ListJobTriggers.
+ Attributes:
+ parent (str):
+ Required. Parent resource name.
+
+ The format of this value varies depending on whether you
+ have `specified a processing
+ location `__:
+
+ - Projects scope, location specified:
+ ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+ - Projects scope, no location specified (defaults to
+ global): ``projects/``\ PROJECT_ID
+
+ The following example ``parent`` string specifies a parent
+ project with the identifier ``example-project``, and
+ specifies the ``europe-west3`` location for processing data:
+
+ ::
+
+ parent=projects/example-project/locations/europe-west3
+ page_token (str):
+ Page token to continue retrieval. Comes from previous call
+ to ListJobTriggers. ``order_by`` field must not change for
+ subsequent calls.
+ page_size (int):
+ Size of the page, can be limited by a server.
+ order_by (str):
+ Comma separated list of triggeredJob fields to order by,
+ followed by ``asc`` or ``desc`` postfix. This list is
+ case-insensitive, default sorting order is ascending,
+ redundant space characters are insignificant.
+
+ Example: ``name asc,update_time, create_time desc``
+
+ Supported fields are:
+
+ - ``create_time``: corresponds to time the JobTrigger was
+ created.
+ - ``update_time``: corresponds to time the JobTrigger was
+ last updated.
+ - ``last_run_time``: corresponds to the last time the
+ JobTrigger ran.
+ - ``name``: corresponds to JobTrigger's name.
+ - ``display_name``: corresponds to JobTrigger's display
+ name.
+ - ``status``: corresponds to JobTrigger's status.
+ filter (str):
+ Allows filtering.
+
+ Supported syntax:
+
+ - Filter expressions are made up of one or more
+ restrictions.
+ - Restrictions can be combined by ``AND`` or ``OR`` logical
+ operators. A sequence of restrictions implicitly uses
+ ``AND``.
+ - A restriction has the form of
+ ``{field} {operator} {value}``.
+ - Supported fields/values for inspect jobs:
+
+ - ``status`` - HEALTHY|PAUSED|CANCELLED
+ - ``inspected_storage`` -
+ DATASTORE|CLOUD_STORAGE|BIGQUERY
+ - ``last_run_time`` - RFC 3339 formatted timestamp,
+ surrounded by quotation marks. Nanoseconds are
+ ignored.
+ - ``error_count`` - Number of errors that have occurred
+ while running.
+
+ - The operator must be ``=`` or ``!=`` for status and
+ inspected_storage.
+
+ Examples:
+
+ - inspected_storage = cloud_storage AND status = HEALTHY
+ - inspected_storage = cloud_storage OR inspected_storage =
+ bigquery
+ - inspected_storage = cloud_storage AND (status = PAUSED OR
+ status = HEALTHY)
+ - last_run_time > "2017-12-12T00:00:00+00:00"
+
+ The length of this field should be no more than 500
+ characters.
+ location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + order_by = proto.Field( + proto.STRING, + number=4, + ) + filter = proto.Field( + proto.STRING, + number=5, + ) + location_id = proto.Field( + proto.STRING, + number=7, + ) + + +class ListJobTriggersResponse(proto.Message): + r"""Response message for ListJobTriggers. + Attributes: + job_triggers (Sequence[google.cloud.dlp_v2.types.JobTrigger]): + List of triggeredJobs, up to page_size in + ListJobTriggersRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListJobTriggers request. + """ + + @property + def raw_page(self): + return self + + job_triggers = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='JobTrigger', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteJobTriggerRequest(proto.Message): + r"""Request message for DeleteJobTrigger. + Attributes: + name (str): + Required. Resource name of the project and the triggeredJob, + for example + ``projects/dlp-test-project/jobTriggers/53234423``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class InspectJobConfig(proto.Message): + r"""Controls what and how to inspect for findings. + Attributes: + storage_config (google.cloud.dlp_v2.types.StorageConfig): + The data to scan. + inspect_config (google.cloud.dlp_v2.types.InspectConfig): + How and what to scan for. + inspect_template_name (str): + If provided, will be used as the default for all values in + InspectConfig. ``inspect_config`` will be merged into the + values persisted as part of the template. + actions (Sequence[google.cloud.dlp_v2.types.Action]): + Actions to execute at the completion of the + job. + """ + + storage_config = proto.Field( + proto.MESSAGE, + number=1, + message=storage.StorageConfig, + ) + inspect_config = proto.Field( + proto.MESSAGE, + number=2, + message='InspectConfig', + ) + inspect_template_name = proto.Field( + proto.STRING, + number=3, + ) + actions = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='Action', + ) + + +class DlpJob(proto.Message): + r"""Combines all of the information about a DLP job. + Attributes: + name (str): + The server-assigned name. + type_ (google.cloud.dlp_v2.types.DlpJobType): + The type of job. + state (google.cloud.dlp_v2.types.DlpJob.JobState): + State of a job. + risk_details (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails): + Results from analyzing risk of a data source. + inspect_details (google.cloud.dlp_v2.types.InspectDataSourceDetails): + Results from inspecting a data source. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job finished. + job_trigger_name (str): + If created by a job trigger, the resource + name of the trigger that instantiated the job. + errors (Sequence[google.cloud.dlp_v2.types.Error]): + A stream of errors encountered running the + job. + """ + class JobState(proto.Enum): + r"""Possible states of a job. 
New items may be added."""
+ JOB_STATE_UNSPECIFIED = 0
+ PENDING = 1
+ RUNNING = 2
+ DONE = 3
+ CANCELED = 4
+ FAILED = 5
+ ACTIVE = 6
+
+ name = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ type_ = proto.Field(
+ proto.ENUM,
+ number=2,
+ enum='DlpJobType',
+ )
+ state = proto.Field(
+ proto.ENUM,
+ number=3,
+ enum=JobState,
+ )
+ risk_details = proto.Field(
+ proto.MESSAGE,
+ number=4,
+ oneof='details',
+ message='AnalyzeDataSourceRiskDetails',
+ )
+ inspect_details = proto.Field(
+ proto.MESSAGE,
+ number=5,
+ oneof='details',
+ message='InspectDataSourceDetails',
+ )
+ create_time = proto.Field(
+ proto.MESSAGE,
+ number=6,
+ message=timestamp_pb2.Timestamp,
+ )
+ start_time = proto.Field(
+ proto.MESSAGE,
+ number=7,
+ message=timestamp_pb2.Timestamp,
+ )
+ end_time = proto.Field(
+ proto.MESSAGE,
+ number=8,
+ message=timestamp_pb2.Timestamp,
+ )
+ job_trigger_name = proto.Field(
+ proto.STRING,
+ number=10,
+ )
+ errors = proto.RepeatedField(
+ proto.MESSAGE,
+ number=11,
+ message='Error',
+ )
+
+
+class GetDlpJobRequest(proto.Message):
+ r"""The request message for [DlpJobs.GetDlpJob][].
+ Attributes:
+ name (str):
+ Required. The name of the DlpJob resource.
+ """
+
+ name = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class ListDlpJobsRequest(proto.Message):
+ r"""The request message for listing DLP jobs.
+ Attributes:
+ parent (str):
+ Required. Parent resource name.
+
+ The format of this value varies depending on whether you
+ have `specified a processing
+ location `__:
+
+ - Projects scope, location specified:
+ ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+ - Projects scope, no location specified (defaults to
+ global): ``projects/``\ PROJECT_ID
+
+ The following example ``parent`` string specifies a parent
+ project with the identifier ``example-project``, and
+ specifies the ``europe-west3`` location for processing data:
+
+ ::
+
+ parent=projects/example-project/locations/europe-west3
+ filter (str):
+ Allows filtering.
+
+ Supported syntax:
+
+ - Filter expressions are made up of one or more
+ restrictions.
+ - Restrictions can be combined by ``AND`` or ``OR`` logical
+ operators. A sequence of restrictions implicitly uses
+ ``AND``.
+ - A restriction has the form of
+ ``{field} {operator} {value}``.
+ - Supported fields/values for inspect jobs:
+
+ - ``state`` - PENDING|RUNNING|CANCELED|FINISHED|FAILED
+ - ``inspected_storage`` -
+ DATASTORE|CLOUD_STORAGE|BIGQUERY
+ - ``trigger_name`` - The resource name of the trigger
+ that created the job.
+ - ``end_time`` - Corresponds to the time the job
+ finished.
+ - ``start_time`` - Corresponds to the time the job
+ started.
+
+ - Supported fields for risk analysis jobs:
+
+ - ``state`` - RUNNING|CANCELED|FINISHED|FAILED
+ - ``end_time`` - Corresponds to the time the job
+ finished.
+ - ``start_time`` - Corresponds to the time the job
+ started.
+
+ - The operator must be ``=`` or ``!=``.
+
+ Examples:
+
+ - inspected_storage = cloud_storage AND state = done
+ - inspected_storage = cloud_storage OR inspected_storage =
+ bigquery
+ - inspected_storage = cloud_storage AND (state = done OR
+ state = canceled)
+ - end_time > "2017-12-12T00:00:00+00:00"
+
+ The length of this field should be no more than 500
+ characters.
+ page_size (int):
+ The standard list page size.
+ page_token (str):
+ The standard list page token.
+ type_ (google.cloud.dlp_v2.types.DlpJobType):
+ The type of job. Defaults to ``DlpJobType.INSPECT``.
+ order_by (str):
+ Comma separated list of fields to order by, followed by
+ ``asc`` or ``desc`` postfix.
This list is case-insensitive,
+ default sorting order is ascending, redundant space
+ characters are insignificant.
+
+ Example: ``name asc, end_time asc, create_time desc``
+
+ Supported fields are:
+
+ - ``create_time``: corresponds to time the job was created.
+ - ``end_time``: corresponds to time the job ended.
+ - ``name``: corresponds to job's name.
+ - ``state``: corresponds to ``state``
+ location_id (str):
+ Deprecated. This field has no effect.
+ """
+
+ parent = proto.Field(
+ proto.STRING,
+ number=4,
+ )
+ filter = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+ page_size = proto.Field(
+ proto.INT32,
+ number=2,
+ )
+ page_token = proto.Field(
+ proto.STRING,
+ number=3,
+ )
+ type_ = proto.Field(
+ proto.ENUM,
+ number=5,
+ enum='DlpJobType',
+ )
+ order_by = proto.Field(
+ proto.STRING,
+ number=6,
+ )
+ location_id = proto.Field(
+ proto.STRING,
+ number=7,
+ )
+
+
+class ListDlpJobsResponse(proto.Message):
+ r"""The response message for listing DLP jobs.
+ Attributes:
+ jobs (Sequence[google.cloud.dlp_v2.types.DlpJob]):
+ A list of DlpJobs that matches the specified
+ filter in the request.
+ next_page_token (str):
+ The standard List next-page token.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ jobs = proto.RepeatedField(
+ proto.MESSAGE,
+ number=1,
+ message='DlpJob',
+ )
+ next_page_token = proto.Field(
+ proto.STRING,
+ number=2,
+ )
+
+
+class CancelDlpJobRequest(proto.Message):
+ r"""The request message for canceling a DLP job.
+ Attributes:
+ name (str):
+ Required. The name of the DlpJob resource to
+ be cancelled.
+ """
+
+ name = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class FinishDlpJobRequest(proto.Message):
+ r"""The request message for finishing a DLP hybrid job.
+ Attributes:
+ name (str):
+ Required. The name of the DlpJob resource to
+ be finished.
+ """
+
+ name = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class DeleteDlpJobRequest(proto.Message):
+ r"""The request message for deleting a DLP job.
+ Attributes:
+ name (str):
+ Required. The name of the DlpJob resource to
+ be deleted.
+ """
+
+ name = proto.Field(
+ proto.STRING,
+ number=1,
+ )
+
+
+class CreateDeidentifyTemplateRequest(proto.Message):
+ r"""Request message for CreateDeidentifyTemplate.
+ Attributes:
+ parent (str):
+ Required. Parent resource name.
+
+ The format of this value varies depending on the scope of
+ the request (project or organization) and whether you have
+ `specified a processing
+ location `__:
+
+ - Projects scope, location specified:
+ ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
+ - Projects scope, no location specified (defaults to
+ global): ``projects/``\ PROJECT_ID
+ - Organizations scope, location specified:
+ ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
+ - Organizations scope, no location specified (defaults to
+ global): ``organizations/``\ ORG_ID
+
+ The following example ``parent`` string specifies a parent
+ project with the identifier ``example-project``, and
+ specifies the ``europe-west3`` location for processing data:
+
+ ::
+
+ parent=projects/example-project/locations/europe-west3
+ deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate):
+ Required. The DeidentifyTemplate to create.
+ template_id (str):
+ The template id can contain uppercase and lowercase letters,
+ numbers, and hyphens; that is, it must match the regular
+ expression: ``[a-zA-Z\d-_]+``. The maximum length is 100
+ characters. Can be empty to allow the system to generate
+ one.
+ location_id (str):
+ Deprecated.
This field has no effect. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + deidentify_template = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyTemplate', + ) + template_id = proto.Field( + proto.STRING, + number=3, + ) + location_id = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateDeidentifyTemplateRequest(proto.Message): + r"""Request message for UpdateDeidentifyTemplate. + Attributes: + name (str): + Required. Resource name of organization and deidentify + template to be updated, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): + New DeidentifyTemplate value. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + deidentify_template = proto.Field( + proto.MESSAGE, + number=2, + message='DeidentifyTemplate', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetDeidentifyTemplateRequest(proto.Message): + r"""Request message for GetDeidentifyTemplate. + Attributes: + name (str): + Required. Resource name of the organization and deidentify + template to be read, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDeidentifyTemplatesRequest(proto.Message): + r"""Request message for ListDeidentifyTemplates. + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListDeidentifyTemplates``. + page_size (int): + Size of the page, can be limited by server. + If zero server returns a page of max size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc,update_time, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to time the template was + created. + - ``update_time``: corresponds to time the template was + last updated. + - ``name``: corresponds to template's name. + - ``display_name``: corresponds to template's display name. + location_id (str): + Deprecated. This field has no effect. 
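+
+ Example: a usage sketch (illustrative only; ``example-project``
+ is a placeholder project id)::
+
+ from google.cloud import dlp_v2
+
+ client = dlp_v2.DlpServiceClient()
+ request = dlp_v2.ListDeidentifyTemplatesRequest(
+ parent='projects/example-project',
+ page_size=50,
+ order_by='create_time desc',
+ )
+ # The returned pager lazily fetches any further pages.
+ for template in client.list_deidentify_templates(request=request):
+ print(template.name)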
+ """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + order_by = proto.Field( + proto.STRING, + number=4, + ) + location_id = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDeidentifyTemplatesResponse(proto.Message): + r"""Response message for ListDeidentifyTemplates. + Attributes: + deidentify_templates (Sequence[google.cloud.dlp_v2.types.DeidentifyTemplate]): + List of deidentify templates, up to page_size in + ListDeidentifyTemplatesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListDeidentifyTemplates request. + """ + + @property + def raw_page(self): + return self + + deidentify_templates = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='DeidentifyTemplate', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteDeidentifyTemplateRequest(proto.Message): + r"""Request message for DeleteDeidentifyTemplate. + Attributes: + name (str): + Required. Resource name of the organization and deidentify + template to be deleted, for example + ``organizations/433245324/deidentifyTemplates/432452342`` or + projects/project-id/deidentifyTemplates/432452342. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class LargeCustomDictionaryConfig(proto.Message): + r"""Configuration for a custom dictionary created from a data source of + any size up to the maximum size defined in the + `limits `__ page. The artifacts + of dictionary creation are stored in the specified Google Cloud + Storage location. Consider using ``CustomInfoType.Dictionary`` for + smaller dictionaries that satisfy the size requirements. + + Attributes: + output_path (google.cloud.dlp_v2.types.CloudStoragePath): + Location to store dictionary artifacts in + Google Cloud Storage. These files will only be + accessible by project owners and the DLP API. If + any of these artifacts are modified, the + dictionary is considered invalid and can no + longer be used. + cloud_storage_file_set (google.cloud.dlp_v2.types.CloudStorageFileSet): + Set of files containing newline-delimited + lists of dictionary phrases. + big_query_field (google.cloud.dlp_v2.types.BigQueryField): + Field in a BigQuery table where each cell + represents a dictionary phrase. + """ + + output_path = proto.Field( + proto.MESSAGE, + number=1, + message=storage.CloudStoragePath, + ) + cloud_storage_file_set = proto.Field( + proto.MESSAGE, + number=2, + oneof='source', + message=storage.CloudStorageFileSet, + ) + big_query_field = proto.Field( + proto.MESSAGE, + number=3, + oneof='source', + message=storage.BigQueryField, + ) + + +class LargeCustomDictionaryStats(proto.Message): + r"""Summary statistics of a custom dictionary. + Attributes: + approx_num_phrases (int): + Approximate number of distinct phrases in the + dictionary. + """ + + approx_num_phrases = proto.Field( + proto.INT64, + number=1, + ) + + +class StoredInfoTypeConfig(proto.Message): + r"""Configuration for stored infoTypes. All fields and subfield + are provided by the user. For more information, see + https://cloud.google.com/dlp/docs/creating-custom-infotypes. + + Attributes: + display_name (str): + Display name of the StoredInfoType (max 256 + characters). + description (str): + Description of the StoredInfoType (max 256 + characters). 
+ large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryConfig): + StoredInfoType where findings are defined by + a dictionary of phrases. + dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): + Store dictionary-based CustomInfoType. + regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Store regular expression-based + StoredInfoType. + """ + + display_name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=2, + ) + large_custom_dictionary = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='LargeCustomDictionaryConfig', + ) + dictionary = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=storage.CustomInfoType.Dictionary, + ) + regex = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message=storage.CustomInfoType.Regex, + ) + + +class StoredInfoTypeStats(proto.Message): + r"""Statistics for a StoredInfoType. + Attributes: + large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryStats): + StoredInfoType where findings are defined by + a dictionary of phrases. + """ + + large_custom_dictionary = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message='LargeCustomDictionaryStats', + ) + + +class StoredInfoTypeVersion(proto.Message): + r"""Version of a StoredInfoType, including the configuration used + to build it, create timestamp, and current state. + + Attributes: + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + StoredInfoType configuration. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Create timestamp of the version. Read-only, + determined by the system when the version is + created. + state (google.cloud.dlp_v2.types.StoredInfoTypeState): + Stored info type version state. Read-only, + updated by the system during dictionary + creation. + errors (Sequence[google.cloud.dlp_v2.types.Error]): + Errors that occurred when creating this storedInfoType + version, or anomalies detected in the storedInfoType data + that render it unusable. Only the five most recent errors + will be displayed, with the most recent error appearing + first. + + For example, some of the data for stored custom dictionaries + is put in the user's Google Cloud Storage bucket, and if + this data is modified or deleted by the user or another + system, the dictionary becomes invalid. + + If any errors occur, fix the problem indicated by the error + message and use the UpdateStoredInfoType API method to + create another version of the storedInfoType to continue + using it, reusing the same ``config`` if it was not the + source of the error. + stats (google.cloud.dlp_v2.types.StoredInfoTypeStats): + Statistics about this storedInfoType version. + """ + + config = proto.Field( + proto.MESSAGE, + number=1, + message='StoredInfoTypeConfig', + ) + create_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + state = proto.Field( + proto.ENUM, + number=3, + enum='StoredInfoTypeState', + ) + errors = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='Error', + ) + stats = proto.Field( + proto.MESSAGE, + number=5, + message='StoredInfoTypeStats', + ) + + +class StoredInfoType(proto.Message): + r"""StoredInfoType resource message that contains information + about the current version and any pending updates. + + Attributes: + name (str): + Resource name. + current_version (google.cloud.dlp_v2.types.StoredInfoTypeVersion): + Current version of the stored info type. 
+ pending_versions (Sequence[google.cloud.dlp_v2.types.StoredInfoTypeVersion]): + Pending versions of the stored info type. + Empty if no versions are pending. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + current_version = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeVersion', + ) + pending_versions = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='StoredInfoTypeVersion', + ) + + +class CreateStoredInfoTypeRequest(proto.Message): + r"""Request message for CreateStoredInfoType. + Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Required. Configuration of the storedInfoType + to create. + stored_info_type_id (str): + The storedInfoType ID can contain uppercase and lowercase + letters, numbers, and hyphens; that is, it must match the + regular expression: ``[a-zA-Z\d-_]+``. The maximum length is + 100 characters. Can be empty to allow the system to generate + one. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + config = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeConfig', + ) + stored_info_type_id = proto.Field( + proto.STRING, + number=3, + ) + location_id = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateStoredInfoTypeRequest(proto.Message): + r"""Request message for UpdateStoredInfoType. + Attributes: + name (str): + Required. Resource name of organization and storedInfoType + to be updated, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): + Updated configuration for the storedInfoType. + If not provided, a new version of the + storedInfoType will be created with the existing + configuration. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Mask to control which fields get updated. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + config = proto.Field( + proto.MESSAGE, + number=2, + message='StoredInfoTypeConfig', + ) + update_mask = proto.Field( + proto.MESSAGE, + number=3, + message=field_mask_pb2.FieldMask, + ) + + +class GetStoredInfoTypeRequest(proto.Message): + r"""Request message for GetStoredInfoType. + Attributes: + name (str): + Required. Resource name of the organization and + storedInfoType to be read, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListStoredInfoTypesRequest(proto.Message): + r"""Request message for ListStoredInfoTypes. 
+ Attributes: + parent (str): + Required. Parent resource name. + + The format of this value varies depending on the scope of + the request (project or organization) and whether you have + `specified a processing + location `__: + + - Projects scope, location specified: + ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID + - Projects scope, no location specified (defaults to + global): ``projects/``\ PROJECT_ID + - Organizations scope, location specified: + ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID + - Organizations scope, no location specified (defaults to + global): ``organizations/``\ ORG_ID + + The following example ``parent`` string specifies a parent + project with the identifier ``example-project``, and + specifies the ``europe-west3`` location for processing data: + + :: + + parent=projects/example-project/locations/europe-west3 + page_token (str): + Page token to continue retrieval. Comes from previous call + to ``ListStoredInfoTypes``. + page_size (int): + Size of the page, can be limited by server. + If zero server returns a page of max size 100. + order_by (str): + Comma separated list of fields to order by, followed by + ``asc`` or ``desc`` postfix. This list is case-insensitive, + default sorting order is ascending, redundant space + characters are insignificant. + + Example: ``name asc, display_name, create_time desc`` + + Supported fields are: + + - ``create_time``: corresponds to time the most recent + version of the resource was created. + - ``state``: corresponds to the state of the resource. + - ``name``: corresponds to resource name. + - ``display_name``: corresponds to info type's display + name. + location_id (str): + Deprecated. This field has no effect. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_token = proto.Field( + proto.STRING, + number=2, + ) + page_size = proto.Field( + proto.INT32, + number=3, + ) + order_by = proto.Field( + proto.STRING, + number=4, + ) + location_id = proto.Field( + proto.STRING, + number=5, + ) + + +class ListStoredInfoTypesResponse(proto.Message): + r"""Response message for ListStoredInfoTypes. + Attributes: + stored_info_types (Sequence[google.cloud.dlp_v2.types.StoredInfoType]): + List of storedInfoTypes, up to page_size in + ListStoredInfoTypesRequest. + next_page_token (str): + If the next page is available then the next + page token to be used in following + ListStoredInfoTypes request. + """ + + @property + def raw_page(self): + return self + + stored_info_types = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='StoredInfoType', + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class DeleteStoredInfoTypeRequest(proto.Message): + r"""Request message for DeleteStoredInfoType. + Attributes: + name (str): + Required. Resource name of the organization and + storedInfoType to be deleted, for example + ``organizations/433245324/storedInfoTypes/432452342`` or + projects/project-id/storedInfoTypes/432452342. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class HybridInspectJobTriggerRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the trigger to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/jobTriggers/53234423``. + hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): + The item to inspect. 
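+
+    Example (an illustrative sketch, not generated output; the trigger
+    name reuses the example value above and the inspected text is made
+    up)::
+
+        from google.cloud import dlp_v2
+
+        client = dlp_v2.DlpServiceClient()
+        client.hybrid_inspect_job_trigger(
+            request=dlp_v2.HybridInspectJobTriggerRequest(
+                name="projects/dlp-test-project/jobTriggers/53234423",
+                hybrid_item=dlp_v2.HybridContentItem(
+                    item=dlp_v2.ContentItem(value="My phone number is (415) 555-0100"),
+                ),
+            ),
+        )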
+ """ + + name = proto.Field( + proto.STRING, + number=1, + ) + hybrid_item = proto.Field( + proto.MESSAGE, + number=3, + message='HybridContentItem', + ) + + +class HybridInspectDlpJobRequest(proto.Message): + r"""Request to search for potentially sensitive info in a custom + location. + + Attributes: + name (str): + Required. Resource name of the job to execute a hybrid + inspect on, for example + ``projects/dlp-test-project/dlpJob/53234423``. + hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): + The item to inspect. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + hybrid_item = proto.Field( + proto.MESSAGE, + number=3, + message='HybridContentItem', + ) + + +class HybridContentItem(proto.Message): + r"""An individual hybrid item to inspect. Will be stored + temporarily during processing. + + Attributes: + item (google.cloud.dlp_v2.types.ContentItem): + The item to inspect. + finding_details (google.cloud.dlp_v2.types.HybridFindingDetails): + Supplementary information that will be added + to each finding. + """ + + item = proto.Field( + proto.MESSAGE, + number=1, + message='ContentItem', + ) + finding_details = proto.Field( + proto.MESSAGE, + number=2, + message='HybridFindingDetails', + ) + + +class HybridFindingDetails(proto.Message): + r"""Populate to associate additional data with each finding. + Attributes: + container_details (google.cloud.dlp_v2.types.Container): + Details about the container where the content + being inspected is from. + file_offset (int): + Offset in bytes of the line, from the + beginning of the file, where the finding is + located. Populate if the item being scanned is + only part of a bigger item, such as a shard of a + file and you want to track the absolute position + of the finding. + row_offset (int): + Offset of the row for tables. Populate if the + row(s) being scanned are part of a bigger + dataset and you want to keep track of their + absolute position. + table_options (google.cloud.dlp_v2.types.TableOptions): + If the container is a table, additional information to make + findings meaningful such as the columns that are primary + keys. If not known ahead of time, can also be set within + each inspect hybrid call and the two will be merged. Note + that identifying_fields will only be stored to BigQuery, and + only if the BigQuery action has been included. + labels (Sequence[google.cloud.dlp_v2.types.HybridFindingDetails.LabelsEntry]): + Labels to represent user provided metadata about the data + being inspected. If configured by the job, some key values + may be required. The labels associated with ``Finding``'s + produced by hybrid inspection. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. 
+ + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + """ + + container_details = proto.Field( + proto.MESSAGE, + number=1, + message='Container', + ) + file_offset = proto.Field( + proto.INT64, + number=2, + ) + row_offset = proto.Field( + proto.INT64, + number=3, + ) + table_options = proto.Field( + proto.MESSAGE, + number=4, + message=storage.TableOptions, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + + +class HybridInspectResponse(proto.Message): + r"""Quota exceeded errors will be thrown once quota has been met. """ + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py new file mode 100644 index 00000000..9d33cb03 --- /dev/null +++ b/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py @@ -0,0 +1,1202 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.privacy.dlp.v2', + manifest={ + 'Likelihood', + 'FileType', + 'InfoType', + 'StoredType', + 'CustomInfoType', + 'FieldId', + 'PartitionId', + 'KindExpression', + 'DatastoreOptions', + 'CloudStorageRegexFileSet', + 'CloudStorageOptions', + 'CloudStorageFileSet', + 'CloudStoragePath', + 'BigQueryOptions', + 'StorageConfig', + 'HybridOptions', + 'BigQueryKey', + 'DatastoreKey', + 'Key', + 'RecordKey', + 'BigQueryTable', + 'BigQueryField', + 'EntityId', + 'TableOptions', + }, +) + + +class Likelihood(proto.Enum): + r"""Categorization of results based on how likely they are to + represent a match, based on the number of elements they contain + which imply a match. + """ + LIKELIHOOD_UNSPECIFIED = 0 + VERY_UNLIKELY = 1 + UNLIKELY = 2 + POSSIBLE = 3 + LIKELY = 4 + VERY_LIKELY = 5 + + +class FileType(proto.Enum): + r"""Definitions of file type groups to scan. New types will be + added to this list. + """ + FILE_TYPE_UNSPECIFIED = 0 + BINARY_FILE = 1 + TEXT_FILE = 2 + IMAGE = 3 + WORD = 5 + PDF = 6 + AVRO = 7 + CSV = 8 + TSV = 9 + + +class InfoType(proto.Message): + r"""Type of information detected by the API. + Attributes: + name (str): + Name of the information type. Either a name of your choosing + when creating a CustomInfoType, or one of the names listed + at https://cloud.google.com/dlp/docs/infotypes-reference + when specifying a built-in type. When sending Cloud DLP + results to Data Catalog, infoType names should conform to + the pattern ``[A-Za-z0-9$-_]{1,64}``. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class StoredType(proto.Message): + r"""A reference to a StoredInfoType to use with scanning. + Attributes: + name (str): + Resource name of the requested ``StoredInfoType``, for + example + ``organizations/433245324/storedInfoTypes/432452342`` or + ``projects/project-id/storedInfoTypes/432452342``. 
+        create_time (google.protobuf.timestamp_pb2.Timestamp):
+            Timestamp indicating when the version of the
+            ``StoredInfoType`` used for inspection was created.
+            Output-only field, populated by the system.
+    """
+
+    name = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    create_time = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message=timestamp_pb2.Timestamp,
+    )
+
+
+class CustomInfoType(proto.Message):
+    r"""Custom information type provided by the user. Used to find
+    domain-specific sensitive information configurable to the data
+    in question.
+
+    Attributes:
+        info_type (google.cloud.dlp_v2.types.InfoType):
+            CustomInfoType can either be a new infoType, or an extension
+            of built-in infoType, when the name matches one of existing
+            infoTypes and that infoType is specified in
+            ``InspectContent.info_types`` field. Specifying the latter
+            adds findings to the one detected by the system. If built-in
+            info type is not specified in ``InspectContent.info_types``
+            list then the name is treated as a custom info type.
+        likelihood (google.cloud.dlp_v2.types.Likelihood):
+            Likelihood to return for this CustomInfoType. This base
+            value can be altered by a detection rule if the finding
+            meets the criteria specified by the rule. Defaults to
+            ``VERY_LIKELY`` if not specified.
+        dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary):
+            A list of phrases to detect as a
+            CustomInfoType.
+        regex (google.cloud.dlp_v2.types.CustomInfoType.Regex):
+            Regular expression based CustomInfoType.
+        surrogate_type (google.cloud.dlp_v2.types.CustomInfoType.SurrogateType):
+            Message for detecting output from
+            deidentification transformations that support
+            reversing.
+        stored_type (google.cloud.dlp_v2.types.StoredType):
+            Load an existing ``StoredInfoType`` resource for use in
+            ``InspectDataSource``. Not currently supported in
+            ``InspectContent``.
+        detection_rules (Sequence[google.cloud.dlp_v2.types.CustomInfoType.DetectionRule]):
+            Set of detection rules to apply to all findings of this
+            CustomInfoType. Rules are applied in order that they are
+            specified. Not supported for the ``surrogate_type``
+            CustomInfoType.
+        exclusion_type (google.cloud.dlp_v2.types.CustomInfoType.ExclusionType):
+            If set to EXCLUSION_TYPE_EXCLUDE this infoType will not
+            cause a finding to be returned. It still can be used for
+            rules matching.
+    """
+    class ExclusionType(proto.Enum):
+        r""""""
+        EXCLUSION_TYPE_UNSPECIFIED = 0
+        EXCLUSION_TYPE_EXCLUDE = 1
+
+    class Dictionary(proto.Message):
+        r"""Custom information type based on a dictionary of words or phrases.
+        This can be used to match sensitive information specific to the
+        data, such as a list of employee IDs or job titles.
+
+        Dictionary words are case-insensitive and all characters other than
+        letters and digits in the unicode `Basic Multilingual
+        Plane <https://en.wikipedia.org/wiki/Plane_%28Unicode%29#Basic_Multilingual_Plane>`__
+        will be replaced with whitespace when scanning for matches, so the
+        dictionary phrase "Sam Johnson" will match all three phrases "sam
+        johnson", "Sam, Johnson", and "Sam (Johnson)". Additionally, the
+        characters surrounding any match must be of a different type than
+        the adjacent characters within the word, so letters must be next to
+        non-letters and digits next to non-digits. For example, the
+        dictionary word "jen" will match the first three letters of the text
+        "jen123" but will return no matches for "jennifer".
+
+        Dictionary words containing a large number of characters that are
+        not letters or digits may result in unexpected findings because such
+        characters are treated as whitespace.
+        The `limits <https://cloud.google.com/dlp/limits>`__ page contains
+        details about the size limits of dictionaries. For dictionaries that
+        do not fit within these constraints, consider using
+        ``LargeCustomDictionaryConfig`` in the ``StoredInfoType`` API.
+
+        Attributes:
+            word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList):
+                List of words or phrases to search for.
+            cloud_storage_path (google.cloud.dlp_v2.types.CloudStoragePath):
+                Newline-delimited file of words in Cloud
+                Storage. Only a single file is accepted.
+        """
+
+        class WordList(proto.Message):
+            r"""Message defining a list of words or phrases to search for in
+            the data.
+
+            Attributes:
+                words (Sequence[str]):
+                    Words or phrases defining the dictionary. The dictionary
+                    must contain at least one phrase and every phrase must
+                    contain at least 2 characters that are letters or digits.
+                    [required]
+            """
+
+            words = proto.RepeatedField(
+                proto.STRING,
+                number=1,
+            )
+
+        word_list = proto.Field(
+            proto.MESSAGE,
+            number=1,
+            oneof='source',
+            message='CustomInfoType.Dictionary.WordList',
+        )
+        cloud_storage_path = proto.Field(
+            proto.MESSAGE,
+            number=3,
+            oneof='source',
+            message='CloudStoragePath',
+        )
+
+    class Regex(proto.Message):
+        r"""Message defining a custom regular expression.
+        Attributes:
+            pattern (str):
+                Pattern defining the regular expression. Its
+                syntax
+                (https://github.com/google/re2/wiki/Syntax) can
+                be found under the google/re2 repository on
+                GitHub.
+            group_indexes (Sequence[int]):
+                The index of the submatch to extract as
+                findings. When not specified, the entire match
+                is returned. No more than 3 may be included.
+        """
+
+        pattern = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        group_indexes = proto.RepeatedField(
+            proto.INT32,
+            number=2,
+        )
+
+    class SurrogateType(proto.Message):
+        r"""Message for detecting output from deidentification transformations
+        such as
+        ```CryptoReplaceFfxFpeConfig`` <https://cloud.google.com/dlp/docs/reference/rest/v2/projects.deidentifyTemplates#cryptoreplaceffxfpeconfig>`__.
+        These types of transformations are those that perform
+        pseudonymization, thereby producing a "surrogate" as output. This
+        should be used in conjunction with a field on the transformation
+        such as ``surrogate_info_type``. This CustomInfoType does not
+        support the use of ``detection_rules``.
+        """
+
+    class DetectionRule(proto.Message):
+        r"""Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a
+        ``CustomInfoType`` to alter behavior under certain circumstances,
+        depending on the specific details of the rule. Not supported for the
+        ``surrogate_type`` custom infoType.
+
+        Attributes:
+            hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule):
+                Hotword-based detection rule.
+        """
+
+        class Proximity(proto.Message):
+            r"""Message for specifying a window around a finding to apply a
+            detection rule.
+
+            Attributes:
+                window_before (int):
+                    Number of characters before the finding to
+                    consider.
+                window_after (int):
+                    Number of characters after the finding to
+                    consider.
+            """
+
+            window_before = proto.Field(
+                proto.INT32,
+                number=1,
+            )
+            window_after = proto.Field(
+                proto.INT32,
+                number=2,
+            )
+
+        class LikelihoodAdjustment(proto.Message):
+            r"""Message for specifying an adjustment to the likelihood of a
+            finding as part of a detection rule.
+
+            Attributes:
+                fixed_likelihood (google.cloud.dlp_v2.types.Likelihood):
+                    Set the likelihood of a finding to a fixed
+                    value.
+                relative_likelihood (int):
+                    Increase or decrease the likelihood by the specified number
+                    of levels.
For example, if a finding would be ``POSSIBLE`` + without the detection rule and ``relative_likelihood`` is 1, + then it is upgraded to ``LIKELY``, while a value of -1 would + downgrade it to ``UNLIKELY``. Likelihood may never drop + below ``VERY_UNLIKELY`` or exceed ``VERY_LIKELY``, so + applying an adjustment of 1 followed by an adjustment of -1 + when base likelihood is ``VERY_LIKELY`` will result in a + final likelihood of ``LIKELY``. + """ + + fixed_likelihood = proto.Field( + proto.ENUM, + number=1, + oneof='adjustment', + enum='Likelihood', + ) + relative_likelihood = proto.Field( + proto.INT32, + number=2, + oneof='adjustment', + ) + + class HotwordRule(proto.Message): + r"""The rule that adjusts the likelihood of findings within a + certain proximity of hotwords. + + Attributes: + hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): + Regular expression pattern defining what + qualifies as a hotword. + proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): + Proximity of the finding within which the + entire hotword must reside. The total length of + the window cannot exceed 1000 characters. Note + that the finding itself will be included in the + window, so that hotwords may be used to match + substrings of the finding itself. For example, + the certainty of a phone number regex "\(\d{3}\) + \d{3}-\d{4}" could be adjusted upwards if the + area code is known to be the local area code of + a company office using the hotword regex + "\(xxx\)", where "xxx" is the area code in + question. + likelihood_adjustment (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.LikelihoodAdjustment): + Likelihood adjustment to apply to all + matching findings. + """ + + hotword_regex = proto.Field( + proto.MESSAGE, + number=1, + message='CustomInfoType.Regex', + ) + proximity = proto.Field( + proto.MESSAGE, + number=2, + message='CustomInfoType.DetectionRule.Proximity', + ) + likelihood_adjustment = proto.Field( + proto.MESSAGE, + number=3, + message='CustomInfoType.DetectionRule.LikelihoodAdjustment', + ) + + hotword_rule = proto.Field( + proto.MESSAGE, + number=1, + oneof='type', + message='CustomInfoType.DetectionRule.HotwordRule', + ) + + info_type = proto.Field( + proto.MESSAGE, + number=1, + message='InfoType', + ) + likelihood = proto.Field( + proto.ENUM, + number=6, + enum='Likelihood', + ) + dictionary = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message=Dictionary, + ) + regex = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message=Regex, + ) + surrogate_type = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message=SurrogateType, + ) + stored_type = proto.Field( + proto.MESSAGE, + number=5, + oneof='type', + message='StoredType', + ) + detection_rules = proto.RepeatedField( + proto.MESSAGE, + number=7, + message=DetectionRule, + ) + exclusion_type = proto.Field( + proto.ENUM, + number=8, + enum=ExclusionType, + ) + + +class FieldId(proto.Message): + r"""General identifier of a data field in a storage service. + Attributes: + name (str): + Name describing the field. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class PartitionId(proto.Message): + r"""Datastore partition ID. + A partition ID identifies a grouping of entities. The grouping + is always by project and namespace, however the namespace ID may + be empty. + A partition ID contains several dimensions: + project ID and namespace ID. + + Attributes: + project_id (str): + The ID of the project to which the entities + belong. 
+        namespace_id (str):
+            If not empty, the ID of the namespace to
+            which the entities belong.
+    """
+
+    project_id = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    namespace_id = proto.Field(
+        proto.STRING,
+        number=4,
+    )
+
+
+class KindExpression(proto.Message):
+    r"""A representation of a Datastore kind.
+    Attributes:
+        name (str):
+            The name of the kind.
+    """
+
+    name = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+
+
+class DatastoreOptions(proto.Message):
+    r"""Options defining a data set within Google Cloud Datastore.
+    Attributes:
+        partition_id (google.cloud.dlp_v2.types.PartitionId):
+            A partition ID identifies a grouping of
+            entities. The grouping is always by project and
+            namespace, however the namespace ID may be
+            empty.
+        kind (google.cloud.dlp_v2.types.KindExpression):
+            The kind to process.
+    """
+
+    partition_id = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='PartitionId',
+    )
+    kind = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='KindExpression',
+    )
+
+
+class CloudStorageRegexFileSet(proto.Message):
+    r"""Message representing a set of files in a Cloud Storage bucket.
+    Regular expressions are used to allow fine-grained control over
+    which files in the bucket to include.
+
+    Included files are those that match at least one item in
+    ``include_regex`` and do not match any items in ``exclude_regex``.
+    Note that a file that matches items from both lists will *not* be
+    included. For a match to occur, the entire file path (i.e.,
+    everything in the url after the bucket name) must match the regular
+    expression.
+
+    For example, given the input
+    ``{bucket_name: "mybucket", include_regex: ["directory1/.*"], exclude_regex: ["directory1/excluded.*"]}``:
+
+    -  ``gs://mybucket/directory1/myfile`` will be included
+    -  ``gs://mybucket/directory1/directory2/myfile`` will be included
+       (``.*`` matches across ``/``)
+    -  ``gs://mybucket/directory0/directory1/myfile`` will *not* be
+       included (the full path doesn't match any items in
+       ``include_regex``)
+    -  ``gs://mybucket/directory1/excludedfile`` will *not* be included
+       (the path matches an item in ``exclude_regex``)
+
+    If ``include_regex`` is left empty, it will match all files by
+    default (this is equivalent to setting ``include_regex: [".*"]``).
+
+    Some other common use cases:
+
+    -  ``{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}`` will
+       include all files in ``mybucket`` except for .pdf files
+    -  ``{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}``
+       will include all files directly under
+       ``gs://mybucket/directory/``, without matching across ``/``
+
+    Attributes:
+        bucket_name (str):
+            The name of a Cloud Storage bucket. Required.
+        include_regex (Sequence[str]):
+            A list of regular expressions matching file paths to
+            include. All files in the bucket that match at least one of
+            these regular expressions will be included in the set of
+            files, except for those that also match an item in
+            ``exclude_regex``. Leaving this field empty will match all
+            files by default (this is equivalent to including ``.*`` in
+            the list).
+
+            Regular expressions use RE2
+            `syntax <https://github.com/google/re2/wiki/Syntax>`__; a
+            guide can be found under the google/re2 repository on
+            GitHub.
+        exclude_regex (Sequence[str]):
+            A list of regular expressions matching file paths to
+            exclude. All files in the bucket that match at least one of
+            these regular expressions will be excluded from the scan.
+
+            Regular expressions use RE2
+            `syntax <https://github.com/google/re2/wiki/Syntax>`__; a
+            guide can be found under the google/re2 repository on
+            GitHub.
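+
+    Example (a minimal construction sketch; the bucket name and
+    patterns reuse the illustrative values above)::
+
+        from google.cloud import dlp_v2
+
+        file_set = dlp_v2.CloudStorageRegexFileSet(
+            bucket_name="mybucket",
+            include_regex=["directory1/.*"],
+            exclude_regex=["directory1/excluded.*"],
+        )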
+ """ + + bucket_name = proto.Field( + proto.STRING, + number=1, + ) + include_regex = proto.RepeatedField( + proto.STRING, + number=2, + ) + exclude_regex = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CloudStorageOptions(proto.Message): + r"""Options defining a file or a set of files within a Google + Cloud Storage bucket. + + Attributes: + file_set (google.cloud.dlp_v2.types.CloudStorageOptions.FileSet): + The set of one or more files to scan. + bytes_limit_per_file (int): + Max number of bytes to scan from a file. If a scanned file's + size is bigger than this value then the rest of the bytes + are omitted. Only one of bytes_limit_per_file and + bytes_limit_per_file_percent can be specified. + bytes_limit_per_file_percent (int): + Max percentage of bytes to scan from a file. The rest are + omitted. The number of bytes scanned is rounded down. Must + be between 0 and 100, inclusively. Both 0 and 100 means no + limit. Defaults to 0. Only one of bytes_limit_per_file and + bytes_limit_per_file_percent can be specified. + file_types (Sequence[google.cloud.dlp_v2.types.FileType]): + List of file type groups to include in the scan. If empty, + all files are scanned and available data format processors + are applied. In addition, the binary content of the selected + files is always scanned as well. Images are scanned only as + binary if the specified region does not support image + inspection and no file_types were specified. Image + inspection is restricted to 'global', 'us', 'asia', and + 'europe'. + sample_method (google.cloud.dlp_v2.types.CloudStorageOptions.SampleMethod): + + files_limit_percent (int): + Limits the number of files to scan to this + percentage of the input FileSet. Number of files + scanned is rounded down. Must be between 0 and + 100, inclusively. Both 0 and 100 means no limit. + Defaults to 0. + """ + class SampleMethod(proto.Enum): + r"""How to sample bytes if not all bytes are scanned. Meaningful only + when used in conjunction with bytes_limit_per_file. If not + specified, scanning would start from the top. + """ + SAMPLE_METHOD_UNSPECIFIED = 0 + TOP = 1 + RANDOM_START = 2 + + class FileSet(proto.Message): + r"""Set of files to scan. + Attributes: + url (str): + The Cloud Storage url of the file(s) to scan, in the format + ``gs:///``. Trailing wildcard in the path is + allowed. + + If the url ends in a trailing slash, the bucket or directory + represented by the url will be scanned non-recursively + (content in sub-directories will not be scanned). This means + that ``gs://mybucket/`` is equivalent to + ``gs://mybucket/*``, and ``gs://mybucket/directory/`` is + equivalent to ``gs://mybucket/directory/*``. + + Exactly one of ``url`` or ``regex_file_set`` must be set. + regex_file_set (google.cloud.dlp_v2.types.CloudStorageRegexFileSet): + The regex-filtered set of files to scan. Exactly one of + ``url`` or ``regex_file_set`` must be set. 
+ """ + + url = proto.Field( + proto.STRING, + number=1, + ) + regex_file_set = proto.Field( + proto.MESSAGE, + number=2, + message='CloudStorageRegexFileSet', + ) + + file_set = proto.Field( + proto.MESSAGE, + number=1, + message=FileSet, + ) + bytes_limit_per_file = proto.Field( + proto.INT64, + number=4, + ) + bytes_limit_per_file_percent = proto.Field( + proto.INT32, + number=8, + ) + file_types = proto.RepeatedField( + proto.ENUM, + number=5, + enum='FileType', + ) + sample_method = proto.Field( + proto.ENUM, + number=6, + enum=SampleMethod, + ) + files_limit_percent = proto.Field( + proto.INT32, + number=7, + ) + + +class CloudStorageFileSet(proto.Message): + r"""Message representing a set of files in Cloud Storage. + Attributes: + url (str): + The url, in the format ``gs:///``. Trailing + wildcard in the path is allowed. + """ + + url = proto.Field( + proto.STRING, + number=1, + ) + + +class CloudStoragePath(proto.Message): + r"""Message representing a single file or path in Cloud Storage. + Attributes: + path (str): + A url representing a file or path (no wildcards) in Cloud + Storage. Example: gs://[BUCKET_NAME]/dictionary.txt + """ + + path = proto.Field( + proto.STRING, + number=1, + ) + + +class BigQueryOptions(proto.Message): + r"""Options defining BigQuery table and row identifiers. + Attributes: + table_reference (google.cloud.dlp_v2.types.BigQueryTable): + Complete BigQuery table reference. + identifying_fields (Sequence[google.cloud.dlp_v2.types.FieldId]): + Table fields that may uniquely identify a row within the + table. When ``actions.saveFindings.outputConfig.table`` is + specified, the values of columns specified here are + available in the output table under + ``location.content_locations.record_location.record_key.id_values``. + Nested fields such as ``person.birthdate.year`` are allowed. + rows_limit (int): + Max number of rows to scan. If the table has more rows than + this value, the rest of the rows are omitted. If not set, or + if set to 0, all rows will be scanned. Only one of + rows_limit and rows_limit_percent can be specified. Cannot + be used in conjunction with TimespanConfig. + rows_limit_percent (int): + Max percentage of rows to scan. The rest are omitted. The + number of rows scanned is rounded down. Must be between 0 + and 100, inclusively. Both 0 and 100 means no limit. + Defaults to 0. Only one of rows_limit and rows_limit_percent + can be specified. Cannot be used in conjunction with + TimespanConfig. + sample_method (google.cloud.dlp_v2.types.BigQueryOptions.SampleMethod): + + excluded_fields (Sequence[google.cloud.dlp_v2.types.FieldId]): + References to fields excluded from scanning. + This allows you to skip inspection of entire + columns which you know have no findings. + """ + class SampleMethod(proto.Enum): + r"""How to sample rows if not all rows are scanned. Meaningful only when + used in conjunction with either rows_limit or rows_limit_percent. If + not specified, rows are scanned in the order BigQuery reads them. 
+ """ + SAMPLE_METHOD_UNSPECIFIED = 0 + TOP = 1 + RANDOM_START = 2 + + table_reference = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + identifying_fields = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='FieldId', + ) + rows_limit = proto.Field( + proto.INT64, + number=3, + ) + rows_limit_percent = proto.Field( + proto.INT32, + number=6, + ) + sample_method = proto.Field( + proto.ENUM, + number=4, + enum=SampleMethod, + ) + excluded_fields = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='FieldId', + ) + + +class StorageConfig(proto.Message): + r"""Shared message indicating Cloud storage type. + Attributes: + datastore_options (google.cloud.dlp_v2.types.DatastoreOptions): + Google Cloud Datastore options. + cloud_storage_options (google.cloud.dlp_v2.types.CloudStorageOptions): + Google Cloud Storage options. + big_query_options (google.cloud.dlp_v2.types.BigQueryOptions): + BigQuery options. + hybrid_options (google.cloud.dlp_v2.types.HybridOptions): + Hybrid inspection options. + Early access feature is in a pre-release state + and might change or have limited support. For + more information, see + https://cloud.google.com/products#product- + launch-stages. + timespan_config (google.cloud.dlp_v2.types.StorageConfig.TimespanConfig): + + """ + + class TimespanConfig(proto.Message): + r"""Configuration of the timespan of the items to include in + scanning. Currently only supported when inspecting Google Cloud + Storage and BigQuery. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Exclude files, tables, or rows older than + this value. If not set, no lower time limit is + applied. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Exclude files, tables, or rows newer than + this value. If not set, no upper time limit is + applied. + timestamp_field (google.cloud.dlp_v2.types.FieldId): + Specification of the field containing the timestamp of + scanned items. Used for data sources like Datastore and + BigQuery. + + For BigQuery: If this value is not specified and the table + was modified between the given start and end times, the + entire table will be scanned. If this value is specified, + then rows are filtered based on the given start and end + times. Rows with a ``NULL`` value in the provided BigQuery + column are skipped. Valid data types of the provided + BigQuery column are: ``INTEGER``, ``DATE``, ``TIMESTAMP``, + and ``DATETIME``. + + For Datastore: If this value is specified, then entities are + filtered based on the given start and end times. If an + entity does not contain the provided timestamp property or + contains empty or invalid values, then it is included. Valid + data types of the provided timestamp property are: + ``TIMESTAMP``. + enable_auto_population_of_timespan_config (bool): + When the job is started by a JobTrigger we will + automatically figure out a valid start_time to avoid + scanning files that have not been modified since the last + time the JobTrigger executed. This will be based on the time + of the execution of the last run of the JobTrigger. 
+ """ + + start_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + timestamp_field = proto.Field( + proto.MESSAGE, + number=3, + message='FieldId', + ) + enable_auto_population_of_timespan_config = proto.Field( + proto.BOOL, + number=4, + ) + + datastore_options = proto.Field( + proto.MESSAGE, + number=2, + oneof='type', + message='DatastoreOptions', + ) + cloud_storage_options = proto.Field( + proto.MESSAGE, + number=3, + oneof='type', + message='CloudStorageOptions', + ) + big_query_options = proto.Field( + proto.MESSAGE, + number=4, + oneof='type', + message='BigQueryOptions', + ) + hybrid_options = proto.Field( + proto.MESSAGE, + number=9, + oneof='type', + message='HybridOptions', + ) + timespan_config = proto.Field( + proto.MESSAGE, + number=6, + message=TimespanConfig, + ) + + +class HybridOptions(proto.Message): + r"""Configuration to control jobs where the content being + inspected is outside of Google Cloud Platform. + + Attributes: + description (str): + A short description of where the data is + coming from. Will be stored once in the job. 256 + max length. + required_finding_label_keys (Sequence[str]): + These are labels that each inspection request must include + within their 'finding_labels' map. Request may contain + others, but any missing one of these will be rejected. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + No more than 10 keys can be required. + labels (Sequence[google.cloud.dlp_v2.types.HybridOptions.LabelsEntry]): + To organize findings, these labels will be added to each + finding. + + Label keys must be between 1 and 63 characters long and must + conform to the following regular expression: + ``[a-z]([-a-z0-9]*[a-z0-9])?``. + + Label values must be between 0 and 63 characters long and + must conform to the regular expression + ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. + + No more than 10 labels can be associated with a given + finding. + + Examples: + + - ``"environment" : "production"`` + - ``"pipeline" : "etl"`` + table_options (google.cloud.dlp_v2.types.TableOptions): + If the container is a table, additional + information to make findings meaningful such as + the columns that are primary keys. + """ + + description = proto.Field( + proto.STRING, + number=1, + ) + required_finding_label_keys = proto.RepeatedField( + proto.STRING, + number=2, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + table_options = proto.Field( + proto.MESSAGE, + number=4, + message='TableOptions', + ) + + +class BigQueryKey(proto.Message): + r"""Row key for identifying a record in BigQuery table. + Attributes: + table_reference (google.cloud.dlp_v2.types.BigQueryTable): + Complete BigQuery table reference. + row_number (int): + Row number inferred at the time the table was scanned. This + value is nondeterministic, cannot be queried, and may be + null for inspection jobs. To locate findings within a table, + specify + ``inspect_job.storage_config.big_query_options.identifying_fields`` + in ``CreateDlpJobRequest``. + """ + + table_reference = proto.Field( + proto.MESSAGE, + number=1, + message='BigQueryTable', + ) + row_number = proto.Field( + proto.INT64, + number=2, + ) + + +class DatastoreKey(proto.Message): + r"""Record key for a finding in Cloud Datastore. 
+    Attributes:
+        entity_key (google.cloud.dlp_v2.types.Key):
+            Datastore entity key.
+    """
+
+    entity_key = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='Key',
+    )
+
+
+class Key(proto.Message):
+    r"""A unique identifier for a Datastore entity.
+    If a key's partition ID or any of its path kinds or names are
+    reserved/read-only, the key is reserved/read-only.
+    A reserved/read-only key is forbidden in certain documented
+    contexts.
+
+    Attributes:
+        partition_id (google.cloud.dlp_v2.types.PartitionId):
+            Entities are partitioned into subsets,
+            currently identified by a project ID and
+            namespace ID. Queries are scoped to a single
+            partition.
+        path (Sequence[google.cloud.dlp_v2.types.Key.PathElement]):
+            The entity path. An entity path consists of one or more
+            elements composed of a kind and a string or numerical
+            identifier, which identify entities. The first element
+            identifies a *root entity*, the second element identifies a
+            *child* of the root entity, the third element identifies a
+            child of the second entity, and so forth. The entities
+            identified by all prefixes of the path are called the
+            element's *ancestors*.
+
+            A path can never be empty, and a path can have at most 100
+            elements.
+    """
+
+    class PathElement(proto.Message):
+        r"""A (kind, ID/name) pair used to construct a key path.
+        If either name or ID is set, the element is complete. If neither
+        is set, the element is incomplete.
+
+        Attributes:
+            kind (str):
+                The kind of the entity. A kind matching regex ``__.*__`` is
+                reserved/read-only. A kind must not contain more than 1500
+                bytes when UTF-8 encoded. Cannot be ``""``.
+            id (int):
+                The auto-allocated ID of the entity.
+                Never equal to zero. Values less than zero are
+                discouraged and may not be supported in the
+                future.
+            name (str):
+                The name of the entity. A name matching regex ``__.*__`` is
+                reserved/read-only. A name must not be more than 1500 bytes
+                when UTF-8 encoded. Cannot be ``""``.
+        """
+
+        kind = proto.Field(
+            proto.STRING,
+            number=1,
+        )
+        id = proto.Field(
+            proto.INT64,
+            number=2,
+            oneof='id_type',
+        )
+        name = proto.Field(
+            proto.STRING,
+            number=3,
+            oneof='id_type',
+        )
+
+    partition_id = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='PartitionId',
+    )
+    path = proto.RepeatedField(
+        proto.MESSAGE,
+        number=2,
+        message=PathElement,
+    )
+
+
+class RecordKey(proto.Message):
+    r"""Message for a unique key indicating a record that contains a
+    finding.
+
+    Attributes:
+        datastore_key (google.cloud.dlp_v2.types.DatastoreKey):
+
+        big_query_key (google.cloud.dlp_v2.types.BigQueryKey):
+
+        id_values (Sequence[str]):
+            Values of identifying columns in the given row. Order of
+            values matches the order of ``identifying_fields`` specified
+            in the scanning request.
+    """
+
+    datastore_key = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        oneof='type',
+        message='DatastoreKey',
+    )
+    big_query_key = proto.Field(
+        proto.MESSAGE,
+        number=3,
+        oneof='type',
+        message='BigQueryKey',
+    )
+    id_values = proto.RepeatedField(
+        proto.STRING,
+        number=5,
+    )
+
+
+class BigQueryTable(proto.Message):
+    r"""Message defining the location of a BigQuery table. A table is
+    uniquely identified by its project_id, dataset_id, and table_name.
+    Within a query a table is often referenced with a string in the
+    format of: ``<project_id>:<dataset_id>.<table_id>`` or
+    ``<project_id>.<dataset_id>.<table_id>``.
+
+    Attributes:
+        project_id (str):
+            The Google Cloud Platform project ID of the
+            project containing the table. If omitted,
+            project ID is inferred from the API call.
+        dataset_id (str):
+            Dataset ID of the table.
+        table_id (str):
+            Name of the table.
+    """
+
+    project_id = proto.Field(
+        proto.STRING,
+        number=1,
+    )
+    dataset_id = proto.Field(
+        proto.STRING,
+        number=2,
+    )
+    table_id = proto.Field(
+        proto.STRING,
+        number=3,
+    )
+
+
+class BigQueryField(proto.Message):
+    r"""Message defining a field of a BigQuery table.
+    Attributes:
+        table (google.cloud.dlp_v2.types.BigQueryTable):
+            Source table of the field.
+        field (google.cloud.dlp_v2.types.FieldId):
+            Designated field in the BigQuery table.
+    """
+
+    table = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='BigQueryTable',
+    )
+    field = proto.Field(
+        proto.MESSAGE,
+        number=2,
+        message='FieldId',
+    )
+
+
+class EntityId(proto.Message):
+    r"""An entity in a dataset is a field or set of fields that correspond
+    to a single person. For example, in medical records the ``EntityId``
+    might be a patient identifier, or for financial records it might be
+    an account identifier. This message is used when generalizations or
+    analysis must take into account that multiple rows correspond to the
+    same entity.
+
+    Attributes:
+        field (google.cloud.dlp_v2.types.FieldId):
+            Composite key indicating which field contains
+            the entity identifier.
+    """
+
+    field = proto.Field(
+        proto.MESSAGE,
+        number=1,
+        message='FieldId',
+    )
+
+
+class TableOptions(proto.Message):
+    r"""Instructions regarding the table content being inspected.
+    Attributes:
+        identifying_fields (Sequence[google.cloud.dlp_v2.types.FieldId]):
+            The columns that are the primary keys for
+            table objects included in ContentItem. A copy of
+            this cell's value will be stored alongside each
+            finding so that the finding can be traced to the
+            specific row it came from. No more than 3 may be
+            provided.
+    """
+
+    identifying_fields = proto.RepeatedField(
+        proto.MESSAGE,
+        number=1,
+        message='FieldId',
+    )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini
new file mode 100644
index 00000000..4505b485
--- /dev/null
+++ b/owl-bot-staging/v2/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.6
+namespace_packages = True
diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py
new file mode 100644
index 00000000..f4db75d0
--- /dev/null
+++ b/owl-bot-staging/v2/noxfile.py
@@ -0,0 +1,132 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
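+#
+# Usage sketch (assumes the ``nox`` CLI is installed; the session names
+# come from the definitions below):
+#
+#   nox -s unit-3.8   # run the unit tests on Python 3.8
+#   nox -s mypy       # type-check the package
+#   nox -s docs       # build the HTML docs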
+#
+import os
+import pathlib
+import shutil
+import subprocess
+import sys
+
+
+import nox  # type: ignore
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
+PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")
+
+
+nox.options.sessions = [
+    "unit",
+    "cover",
+    "mypy",
+    "check_lower_bounds",
+    # exclude update_lower_bounds from default
+    "docs",
+]
+
+@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
+def unit(session):
+    """Run the unit test suite."""
+
+    session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
+    session.install('-e', '.')
+
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google/cloud/dlp_v2/',
+        '--cov-config=.coveragerc',
+        '--cov-report=term',
+        '--cov-report=html',
+        os.path.join('tests', 'unit', ''.join(session.posargs))
+    )
+
+
+@nox.session(python='3.7')
+def cover(session):
+    """Run the final coverage report.
+    This outputs the coverage report aggregating coverage from the unit
+    test runs (not system test runs), and then erases coverage data.
+    """
+    session.install("coverage", "pytest-cov")
+    session.run("coverage", "report", "--show-missing", "--fail-under=100")
+
+    session.run("coverage", "erase")
+
+
+@nox.session(python=['3.6', '3.7'])
+def mypy(session):
+    """Run the type checker."""
+    session.install('mypy', 'types-pkg_resources')
+    session.install('.')
+    session.run(
+        'mypy',
+        '--explicit-package-bases',
+        'google',
+    )
+
+
+@nox.session
+def update_lower_bounds(session):
+    """Update lower bounds in constraints.txt to match setup.py"""
+    session.install('google-cloud-testutils')
+    session.install('.')
+
+    session.run(
+        'lower-bound-checker',
+        'update',
+        '--package-name',
+        PACKAGE_NAME,
+        '--constraints-file',
+        str(LOWER_BOUND_CONSTRAINTS_FILE),
+    )
+
+
+@nox.session
+def check_lower_bounds(session):
+    """Check lower bounds in setup.py are reflected in constraints file"""
+    session.install('google-cloud-testutils')
+    session.install('.')
+
+    session.run(
+        'lower-bound-checker',
+        'check',
+        '--package-name',
+        PACKAGE_NAME,
+        '--constraints-file',
+        str(LOWER_BOUND_CONSTRAINTS_FILE),
+    )
+
+@nox.session(python='3.6')
+def docs(session):
+    """Build the docs for this library."""
+
+    session.install("-e", ".")
+    session.install("sphinx<3.0.0", "alabaster", "recommonmark")
+
+    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+    session.run(
+        "sphinx-build",
+        "-W",  # warnings as errors
+        "-T",  # show full traceback on exception
+        "-N",  # no colors
+        "-b",
+        "html",
+        "-d",
+        os.path.join("docs", "_build", "doctrees", ""),
+        os.path.join("docs", ""),
+        os.path.join("docs", "_build", "html", ""),
+    )
diff --git a/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py
new file mode 100644
index 00000000..82a5fabe
--- /dev/null
+++ b/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py
@@ -0,0 +1,209 @@
+#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class dlpCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'activate_job_trigger': ('name', ), + 'cancel_dlp_job': ('name', ), + 'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ), + 'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ), + 'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ), + 'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ), + 'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ), + 'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ), + 'delete_deidentify_template': ('name', ), + 'delete_dlp_job': ('name', ), + 'delete_inspect_template': ('name', ), + 'delete_job_trigger': ('name', ), + 'delete_stored_info_type': ('name', ), + 'finish_dlp_job': ('name', ), + 'get_deidentify_template': ('name', ), + 'get_dlp_job': ('name', ), + 'get_inspect_template': ('name', ), + 'get_job_trigger': ('name', ), + 'get_stored_info_type': ('name', ), + 'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ), + 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), + 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), + 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ), + 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), + 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'location_id', ), + 'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), + 'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ), + 'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ), + 'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ), + 'update_inspect_template': ('name', 'inspect_template', 'update_mask', ), + 'update_job_trigger': ('name', 'job_trigger', 'update_mask', ), + 'update_stored_info_type': ('name', 'config', 'update_mask', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. 
+ # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=dlpCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the dlp client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
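+
+An illustrative example of the rewrite (the job name is a placeholder):
+
+    # before
+    client.get_dlp_job("projects/my-project/dlpJobs/123", timeout=30)
+
+    # after
+    client.get_dlp_job(
+        request={'name': "projects/my-project/dlpJobs/123"},
+        timeout=30,
+    )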
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2/setup.py b/owl-bot-staging/v2/setup.py new file mode 100644 index 00000000..2beca4f9 --- /dev/null +++ b/owl-bot-staging/v2/setup.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-dlp', + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.cloud'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', + 'libcst >= 0.2.5', + 'proto-plus >= 1.15.0', + 'packaging >= 14.3', ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py new file mode 100644 index 00000000..b54a5fcc --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py new file mode 100644 index 00000000..c8e0b643 --- /dev/null +++ b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -0,0 +1,9393 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient +from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient +from google.cloud.dlp_v2.services.dlp_service import pagers +from google.cloud.dlp_v2.services.dlp_service import transports +from google.cloud.dlp_v2.services.dlp_service.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.dlp_v2.types import dlp +from google.cloud.dlp_v2.types import storage +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import field_mask_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from google.type import date_pb2 # type: ignore +from google.type import dayofweek_pb2 # type: ignore +from google.type import timeofday_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
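+# For example, a DEFAULT_ENDPOINT containing "localhost" is replaced with
+# "foo.googleapis.com", giving the mTLS logic a hostname it can rewrite into
+# a distinct "foo.mtls.googleapis.com" variant.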
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DlpServiceClient._get_default_mtls_endpoint(None) is None + assert DlpServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert DlpServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + DlpServiceClient, + DlpServiceAsyncClient, +]) +def test_dlp_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'dlp.googleapis.com:443' + + +@pytest.mark.parametrize("client_class", [ + DlpServiceClient, + DlpServiceAsyncClient, +]) +def test_dlp_service_client_service_account_always_use_jwt(client_class): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + client = client_class(credentials=creds) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.DlpServiceGrpcTransport, "grpc"), + (transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_dlp_service_client_service_account_always_use_jwt_true(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + +@pytest.mark.parametrize("client_class", [ + DlpServiceClient, + DlpServiceAsyncClient, +]) +def test_dlp_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'dlp.googleapis.com:443' + + +def test_dlp_service_client_get_transport_class(): + transport = DlpServiceClient.get_transport_class() + available_transports = [ + transports.DlpServiceGrpcTransport, + ] + assert transport in available_transports + + transport = DlpServiceClient.get_transport_class("grpc") + assert transport == 
transports.DlpServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) +@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) +def test_dlp_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
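+    # Only the strings "true" and "false" are accepted for this variable; any
+    # other value is rejected with a ValueError before a transport is built.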
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "true"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "false"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) +@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_dlp_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
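+    # "ADC" is Application Default Credentials: the two mtls helpers below are
+    # patched so a default client certificate appears to be available without
+    # any certificate actually being read from disk.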
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_dlp_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), + (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_dlp_service_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
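+    # The filename is forwarded to the transport untouched; no file is opened
+    # here because the transport constructor itself is mocked out.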
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_dlp_service_client_client_options_from_dict(): + with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = DlpServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + ) + + +def test_inspect_content(transport: str = 'grpc', request_type=dlp.InspectContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectContentResponse( + ) + response = client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +def test_inspect_content_from_dict(): + test_inspect_content(request_type=dict) + + +def test_inspect_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + client.inspect_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + + +@pytest.mark.asyncio +async def test_inspect_content_async(transport: str = 'grpc_asyncio', request_type=dlp.InspectContentRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + # Designate an appropriate return value for the call. 
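+        # FakeUnaryUnaryCall wraps the response so that awaiting the mocked
+        # RPC behaves like awaiting a real grpc.aio unary-unary call.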
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse( + )) + response = await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.InspectContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectContentResponse) + + +@pytest.mark.asyncio +async def test_inspect_content_async_from_dict(): + await test_inspect_content_async(request_type=dict) + + +def test_inspect_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.InspectContentRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + call.return_value = dlp.InspectContentResponse() + client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_inspect_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.InspectContentRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.inspect_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse()) + await client.inspect_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_redact_image(transport: str = 'grpc', request_type=dlp.RedactImageRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + ) + response = client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b'redacted_image_blob' + assert response.extracted_text == 'extracted_text_value' + + +def test_redact_image_from_dict(): + test_redact_image(request_type=dict) + + +def test_redact_image_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + client.redact_image() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest() + + +@pytest.mark.asyncio +async def test_redact_image_async(transport: str = 'grpc_asyncio', request_type=dlp.RedactImageRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse( + redacted_image=b'redacted_image_blob', + extracted_text='extracted_text_value', + )) + response = await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.RedactImageRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.RedactImageResponse) + assert response.redacted_image == b'redacted_image_blob' + assert response.extracted_text == 'extracted_text_value' + + +@pytest.mark.asyncio +async def test_redact_image_async_from_dict(): + await test_redact_image_async(request_type=dict) + + +def test_redact_image_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.RedactImageRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + call.return_value = dlp.RedactImageResponse() + client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_redact_image_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.RedactImageRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.redact_image), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse()) + await client.redact_image(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_deidentify_content(transport: str = 'grpc', request_type=dlp.DeidentifyContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyContentResponse( + ) + response = client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyContentResponse) + + +def test_deidentify_content_from_dict(): + test_deidentify_content(request_type=dict) + + +def test_deidentify_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + client.deidentify_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest() + + +@pytest.mark.asyncio +async def test_deidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.DeidentifyContentRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse( + )) + response = await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeidentifyContentRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.DeidentifyContentResponse) + + +@pytest.mark.asyncio +async def test_deidentify_content_async_from_dict(): + await test_deidentify_content_async(request_type=dict) + + +def test_deidentify_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeidentifyContentRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + call.return_value = dlp.DeidentifyContentResponse() + client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_deidentify_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeidentifyContentRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deidentify_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse()) + await client.deidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_reidentify_content(transport: str = 'grpc', request_type=dlp.ReidentifyContentRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ReidentifyContentResponse( + ) + response = client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +def test_reidentify_content_from_dict(): + test_reidentify_content(request_type=dict) + + +def test_reidentify_content_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + client.reidentify_content() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + + +@pytest.mark.asyncio +async def test_reidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.ReidentifyContentRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse( + )) + response = await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ReidentifyContentRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ReidentifyContentResponse) + + +@pytest.mark.asyncio +async def test_reidentify_content_async_from_dict(): + await test_reidentify_content_async(request_type=dict) + + +def test_reidentify_content_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ReidentifyContentRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + call.return_value = dlp.ReidentifyContentResponse() + client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_reidentify_content_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ReidentifyContentRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.reidentify_content), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse()) + await client.reidentify_content(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
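+    # The x-goog-request-params metadata entry is derived from request.parent
+    # and lets the backend route the request to the correct resource.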
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_info_types(transport: str = 'grpc', request_type=dlp.ListInfoTypesRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse( + ) + response = client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +def test_list_info_types_from_dict(): + test_list_info_types(request_type=dict) + + +def test_list_info_types_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + client.list_info_types() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInfoTypesRequest() + + +@pytest.mark.asyncio +async def test_list_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInfoTypesRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse( + )) + response = await client.list_info_types(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListInfoTypesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.ListInfoTypesResponse) + + +@pytest.mark.asyncio +async def test_list_info_types_async_from_dict(): + await test_list_info_types_async(request_type=dict) + + +def test_list_info_types_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
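+        # The flattened `parent` keyword is merged into a ListInfoTypesRequest
+        # under the hood; the assertion below inspects that request object.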
+ client.list_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_info_types_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_info_types( + dlp.ListInfoTypesRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_info_types_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_info_types), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListInfoTypesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_info_types( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_info_types_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_info_types( + dlp.ListInfoTypesRequest(), + parent='parent_value', + ) + + +def test_create_inspect_template(transport: str = 'grpc', request_type=dlp.CreateInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_create_inspect_template_from_dict(): + test_create_inspect_template(request_type=dict) + + +def test_create_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + client.create_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + + +@pytest.mark.asyncio +async def test_create_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_create_inspect_template_async_from_dict(): + await test_create_inspect_template_async(request_type=dict) + + +def test_create_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CreateInspectTemplateRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.create_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_inspect_template( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].inspect_template == dlp.InspectTemplate(name='name_value') + + +def test_create_inspect_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_inspect_template( + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].inspect_template == dlp.InspectTemplate(name='name_value') + + +@pytest.mark.asyncio +async def test_create_inspect_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
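+    # The request object and the flattened fields are two alternative
+    # spellings of the same data, so passing both is ambiguous and rejected.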
+ with pytest.raises(ValueError): + await client.create_inspect_template( + dlp.CreateInspectTemplateRequest(), + parent='parent_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + ) + + +def test_update_inspect_template(transport: str = 'grpc', request_type=dlp.UpdateInspectTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_inspect_template_from_dict(): + test_update_inspect_template(request_type=dict) + + +def test_update_inspect_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + client.update_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest() + + +@pytest.mark.asyncio +async def test_update_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + )) + response = await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateInspectTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.InspectTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +@pytest.mark.asyncio +async def test_update_inspect_template_async_from_dict(): + await test_update_inspect_template_async(request_type=dict) + + +def test_update_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + call.return_value = dlp.InspectTemplate() + client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateInspectTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) + await client.update_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_update_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.InspectTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_inspect_template( + name='name_value', + inspect_template=dlp.InspectTemplate(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
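+        # update_mask is a protobuf FieldMask; its `paths` entries name the
+        # InspectTemplate fields that the server should overwrite.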
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == 'name_value'
+ assert args[0].inspect_template == dlp.InspectTemplate(name='name_value')
+ assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
+
+
+ def test_update_inspect_template_flattened_error():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_inspect_template(
+ dlp.UpdateInspectTemplateRequest(),
+ name='name_value',
+ inspect_template=dlp.InspectTemplate(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+ @pytest.mark.asyncio
+ async def test_update_inspect_template_flattened_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_inspect_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_inspect_template(
+ name='name_value',
+ inspect_template=dlp.InspectTemplate(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == 'name_value'
+ assert args[0].inspect_template == dlp.InspectTemplate(name='name_value')
+ assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
+
+
+ @pytest.mark.asyncio
+ async def test_update_inspect_template_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_inspect_template(
+ dlp.UpdateInspectTemplateRequest(),
+ name='name_value',
+ inspect_template=dlp.InspectTemplate(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+ def test_get_inspect_template(transport: str = 'grpc', request_type=dlp.GetInspectTemplateRequest):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_inspect_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.InspectTemplate(
+ name='name_value',
+ display_name='display_name_value',
+ description='description_value',
+ )
+ response = client.get_inspect_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.GetInspectTemplateRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.InspectTemplate)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+
+
+ def test_get_inspect_template_from_dict():
+ test_get_inspect_template(request_type=dict)
+
+
+ def test_get_inspect_template_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_inspect_template),
+ '__call__') as call:
+ client.get_inspect_template()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.GetInspectTemplateRequest()
+
+
+ @pytest.mark.asyncio
+ async def test_get_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetInspectTemplateRequest):
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_inspect_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate(
+ name='name_value',
+ display_name='display_name_value',
+ description='description_value',
+ ))
+ response = await client.get_inspect_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.GetInspectTemplateRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.InspectTemplate)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+
+
+ @pytest.mark.asyncio
+ async def test_get_inspect_template_async_from_dict():
+ await test_get_inspect_template_async(request_type=dict)
+
+
+ def test_get_inspect_template_field_headers():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.GetInspectTemplateRequest()
+
+ request.name = 'name/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_inspect_template),
+ '__call__') as call:
+ call.return_value = dlp.InspectTemplate()
+ client.get_inspect_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name/value',
+ ) in kw['metadata']
+
+
+ @pytest.mark.asyncio
+ async def test_get_inspect_template_field_headers_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header.
Set these to a non-empty value.
+ request = dlp.GetInspectTemplateRequest()
+
+ request.name = 'name/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_inspect_template),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate())
+ await client.get_inspect_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name/value',
+ ) in kw['metadata']
+
+
+ def test_get_inspect_template_flattened():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_inspect_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.InspectTemplate()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_inspect_template(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == 'name_value'
+
+
+ def test_get_inspect_template_flattened_error():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_inspect_template(
+ dlp.GetInspectTemplateRequest(),
+ name='name_value',
+ )
+
+
+ @pytest.mark.asyncio
+ async def test_get_inspect_template_flattened_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_inspect_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_inspect_template(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == 'name_value'
+
+
+ @pytest.mark.asyncio
+ async def test_get_inspect_template_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_inspect_template(
+ dlp.GetInspectTemplateRequest(),
+ name='name_value',
+ )
+
+
+ def test_list_inspect_templates(transport: str = 'grpc', request_type=dlp.ListInspectTemplatesRequest):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
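+ # A populated request would look like the sketch below; the resource
+ # name is illustrative only:
+ #
+ #     request = request_type(parent='projects/my-project')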
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_inspect_templates),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.ListInspectTemplatesResponse(
+ next_page_token='next_page_token_value',
+ )
+ response = client.list_inspect_templates(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.ListInspectTemplatesRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListInspectTemplatesPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+ def test_list_inspect_templates_from_dict():
+ test_list_inspect_templates(request_type=dict)
+
+
+ def test_list_inspect_templates_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_inspect_templates),
+ '__call__') as call:
+ client.list_inspect_templates()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.ListInspectTemplatesRequest()
+
+
+ @pytest.mark.asyncio
+ async def test_list_inspect_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInspectTemplatesRequest):
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_inspect_templates),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_inspect_templates(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.ListInspectTemplatesRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListInspectTemplatesAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+ @pytest.mark.asyncio
+ async def test_list_inspect_templates_async_from_dict():
+ await test_list_inspect_templates_async(request_type=dict)
+
+
+ def test_list_inspect_templates_field_headers():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.ListInspectTemplatesRequest()
+
+ request.parent = 'parent/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_inspect_templates),
+ '__call__') as call:
+ call.return_value = dlp.ListInspectTemplatesResponse()
+ client.list_inspect_templates(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent/value',
+ ) in kw['metadata']
+
+
+ @pytest.mark.asyncio
+ async def test_list_inspect_templates_field_headers_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.ListInspectTemplatesRequest()
+
+ request.parent = 'parent/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_inspect_templates),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse())
+ await client.list_inspect_templates(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent/value',
+ ) in kw['metadata']
+
+
+ def test_list_inspect_templates_flattened():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_inspect_templates),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.ListInspectTemplatesResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_inspect_templates(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0].parent == 'parent_value'
+
+
+ def test_list_inspect_templates_flattened_error():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_inspect_templates(
+ dlp.ListInspectTemplatesRequest(),
+ parent='parent_value',
+ )
+
+
+ @pytest.mark.asyncio
+ async def test_list_inspect_templates_flattened_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_inspect_templates),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_inspect_templates(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].parent == 'parent_value'
+
+
+ @pytest.mark.asyncio
+ async def test_list_inspect_templates_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_inspect_templates(
+ dlp.ListInspectTemplatesRequest(),
+ parent='parent_value',
+ )
+
+
+ def test_list_inspect_templates_pager():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_inspect_templates),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[
+ dlp.InspectTemplate(),
+ dlp.InspectTemplate(),
+ dlp.InspectTemplate(),
+ ],
+ next_page_token='abc',
+ ),
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[],
+ next_page_token='def',
+ ),
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[
+ dlp.InspectTemplate(),
+ ],
+ next_page_token='ghi',
+ ),
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[
+ dlp.InspectTemplate(),
+ dlp.InspectTemplate(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_inspect_templates(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(isinstance(i, dlp.InspectTemplate)
+ for i in results)
+
+def test_list_inspect_templates_pages():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_inspect_templates),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[
+ dlp.InspectTemplate(),
+ dlp.InspectTemplate(),
+ dlp.InspectTemplate(),
+ ],
+ next_page_token='abc',
+ ),
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[],
+ next_page_token='def',
+ ),
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[
+ dlp.InspectTemplate(),
+ ],
+ next_page_token='ghi',
+ ),
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[
+ dlp.InspectTemplate(),
+ dlp.InspectTemplate(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_inspect_templates(request={}).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_inspect_templates_async_pager():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_inspect_templates),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
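+ # The trailing RuntimeError is a sentinel: if the pager were to request
+ # a fifth page, the mock raises rather than handing back a MagicMock.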
+ call.side_effect = (
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[
+ dlp.InspectTemplate(),
+ dlp.InspectTemplate(),
+ dlp.InspectTemplate(),
+ ],
+ next_page_token='abc',
+ ),
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[],
+ next_page_token='def',
+ ),
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[
+ dlp.InspectTemplate(),
+ ],
+ next_page_token='ghi',
+ ),
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[
+ dlp.InspectTemplate(),
+ dlp.InspectTemplate(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_inspect_templates(request={},)
+ assert async_pager.next_page_token == 'abc'
+ responses = []
+ async for response in async_pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, dlp.InspectTemplate)
+ for i in responses)
+
+@pytest.mark.asyncio
+async def test_list_inspect_templates_async_pages():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_inspect_templates),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[
+ dlp.InspectTemplate(),
+ dlp.InspectTemplate(),
+ dlp.InspectTemplate(),
+ ],
+ next_page_token='abc',
+ ),
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[],
+ next_page_token='def',
+ ),
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[
+ dlp.InspectTemplate(),
+ ],
+ next_page_token='ghi',
+ ),
+ dlp.ListInspectTemplatesResponse(
+ inspect_templates=[
+ dlp.InspectTemplate(),
+ dlp.InspectTemplate(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (await client.list_inspect_templates(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+def test_delete_inspect_template(transport: str = 'grpc', request_type=dlp.DeleteInspectTemplateRequest):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_inspect_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ response = client.delete_inspect_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.DeleteInspectTemplateRequest()
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+ def test_delete_inspect_template_from_dict():
+ test_delete_inspect_template(request_type=dict)
+
+
+ def test_delete_inspect_template_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + client.delete_inspect_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + + +@pytest.mark.asyncio +async def test_delete_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteInspectTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteInspectTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_inspect_template_async_from_dict(): + await test_delete_inspect_template_async(request_type=dict) + + +def test_delete_inspect_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteInspectTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + call.return_value = None + client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_inspect_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteInspectTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_inspect_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_inspect_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_inspect_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
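+ # Patching '__call__' on type(...) intercepts every invocation of the
+ # transport's multicallable, so the flattened call below never leaves
+ # the process (a reading of the generated transport wrappers; the same
+ # pattern is used throughout this module).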
+ with mock.patch.object(
+ type(client.transport.delete_inspect_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_inspect_template(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == 'name_value'
+
+
+ def test_delete_inspect_template_flattened_error():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_inspect_template(
+ dlp.DeleteInspectTemplateRequest(),
+ name='name_value',
+ )
+
+
+ @pytest.mark.asyncio
+ async def test_delete_inspect_template_flattened_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_inspect_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_inspect_template(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == 'name_value'
+
+
+ @pytest.mark.asyncio
+ async def test_delete_inspect_template_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_inspect_template(
+ dlp.DeleteInspectTemplateRequest(),
+ name='name_value',
+ )
+
+
+ def test_create_deidentify_template(transport: str = 'grpc', request_type=dlp.CreateDeidentifyTemplateRequest):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_deidentify_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.DeidentifyTemplate(
+ name='name_value',
+ display_name='display_name_value',
+ description='description_value',
+ )
+ response = client.create_deidentify_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.CreateDeidentifyTemplateRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.DeidentifyTemplate)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+
+
+ def test_create_deidentify_template_from_dict():
+ test_create_deidentify_template(request_type=dict)
+
+
+ def test_create_deidentify_template_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_deidentify_template),
+ '__call__') as call:
+ client.create_deidentify_template()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.CreateDeidentifyTemplateRequest()
+
+
+ @pytest.mark.asyncio
+ async def test_create_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDeidentifyTemplateRequest):
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_deidentify_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate(
+ name='name_value',
+ display_name='display_name_value',
+ description='description_value',
+ ))
+ response = await client.create_deidentify_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.CreateDeidentifyTemplateRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.DeidentifyTemplate)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+
+
+ @pytest.mark.asyncio
+ async def test_create_deidentify_template_async_from_dict():
+ await test_create_deidentify_template_async(request_type=dict)
+
+
+ def test_create_deidentify_template_field_headers():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.CreateDeidentifyTemplateRequest()
+
+ request.parent = 'parent/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_deidentify_template),
+ '__call__') as call:
+ call.return_value = dlp.DeidentifyTemplate()
+ client.create_deidentify_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
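+ # The client builds this metadata roughly as in the sketch below
+ # (mirroring the pager tests later in this module):
+ #
+ #     gapic_v1.routing_header.to_grpc_metadata(
+ #         (('parent', request.parent),)
+ #     )
+ #
+ # which yields ('x-goog-request-params', 'parent=parent/value').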
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent/value',
+ ) in kw['metadata']
+
+
+ @pytest.mark.asyncio
+ async def test_create_deidentify_template_field_headers_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.CreateDeidentifyTemplateRequest()
+
+ request.parent = 'parent/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_deidentify_template),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate())
+ await client.create_deidentify_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent/value',
+ ) in kw['metadata']
+
+
+ def test_create_deidentify_template_flattened():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_deidentify_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.DeidentifyTemplate()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_deidentify_template(
+ parent='parent_value',
+ deidentify_template=dlp.DeidentifyTemplate(name='name_value'),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0].parent == 'parent_value'
+ assert args[0].deidentify_template == dlp.DeidentifyTemplate(name='name_value')
+
+
+ def test_create_deidentify_template_flattened_error():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_deidentify_template(
+ dlp.CreateDeidentifyTemplateRequest(),
+ parent='parent_value',
+ deidentify_template=dlp.DeidentifyTemplate(name='name_value'),
+ )
+
+
+ @pytest.mark.asyncio
+ async def test_create_deidentify_template_flattened_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_deidentify_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_deidentify_template(
+ parent='parent_value',
+ deidentify_template=dlp.DeidentifyTemplate(name='name_value'),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].deidentify_template == dlp.DeidentifyTemplate(name='name_value') + + +@pytest.mark.asyncio +async def test_create_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_deidentify_template( + dlp.CreateDeidentifyTemplateRequest(), + parent='parent_value', + deidentify_template=dlp.DeidentifyTemplate(name='name_value'), + ) + + +def test_update_deidentify_template(transport: str = 'grpc', request_type=dlp.UpdateDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DeidentifyTemplate( + name='name_value', + display_name='display_name_value', + description='description_value', + ) + response = client.update_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DeidentifyTemplate) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + + +def test_update_deidentify_template_from_dict(): + test_update_deidentify_template(request_type=dict) + + +def test_update_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + client.update_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.UpdateDeidentifyTemplateRequest() + + +@pytest.mark.asyncio +async def test_update_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
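+ # FakeUnaryUnaryCall wraps the message in an awaitable stand-in for a
+ # grpc.aio unary-unary call, which is what lets the client 'await' the
+ # mocked RPC below.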
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate(
+ name='name_value',
+ display_name='display_name_value',
+ description='description_value',
+ ))
+ response = await client.update_deidentify_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.UpdateDeidentifyTemplateRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.DeidentifyTemplate)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+
+
+ @pytest.mark.asyncio
+ async def test_update_deidentify_template_async_from_dict():
+ await test_update_deidentify_template_async(request_type=dict)
+
+
+ def test_update_deidentify_template_field_headers():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.UpdateDeidentifyTemplateRequest()
+
+ request.name = 'name/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_deidentify_template),
+ '__call__') as call:
+ call.return_value = dlp.DeidentifyTemplate()
+ client.update_deidentify_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name/value',
+ ) in kw['metadata']
+
+
+ @pytest.mark.asyncio
+ async def test_update_deidentify_template_field_headers_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.UpdateDeidentifyTemplateRequest()
+
+ request.name = 'name/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_deidentify_template),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate())
+ await client.update_deidentify_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name/value',
+ ) in kw['metadata']
+
+
+ def test_update_deidentify_template_flattened():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_deidentify_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.DeidentifyTemplate()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.update_deidentify_template(
+ name='name_value',
+ deidentify_template=dlp.DeidentifyTemplate(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == 'name_value'
+ assert args[0].deidentify_template == dlp.DeidentifyTemplate(name='name_value')
+ assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
+
+
+ def test_update_deidentify_template_flattened_error():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.update_deidentify_template(
+ dlp.UpdateDeidentifyTemplateRequest(),
+ name='name_value',
+ deidentify_template=dlp.DeidentifyTemplate(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+ @pytest.mark.asyncio
+ async def test_update_deidentify_template_flattened_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_deidentify_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.update_deidentify_template(
+ name='name_value',
+ deidentify_template=dlp.DeidentifyTemplate(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == 'name_value'
+ assert args[0].deidentify_template == dlp.DeidentifyTemplate(name='name_value')
+ assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
+
+
+ @pytest.mark.asyncio
+ async def test_update_deidentify_template_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.update_deidentify_template(
+ dlp.UpdateDeidentifyTemplateRequest(),
+ name='name_value',
+ deidentify_template=dlp.DeidentifyTemplate(name='name_value'),
+ update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+ )
+
+
+ def test_get_deidentify_template(transport: str = 'grpc', request_type=dlp.GetDeidentifyTemplateRequest):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_deidentify_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.DeidentifyTemplate(
+ name='name_value',
+ display_name='display_name_value',
+ description='description_value',
+ )
+ response = client.get_deidentify_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.GetDeidentifyTemplateRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.DeidentifyTemplate)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+
+
+ def test_get_deidentify_template_from_dict():
+ test_get_deidentify_template(request_type=dict)
+
+
+ def test_get_deidentify_template_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_deidentify_template),
+ '__call__') as call:
+ client.get_deidentify_template()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.GetDeidentifyTemplateRequest()
+
+
+ @pytest.mark.asyncio
+ async def test_get_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDeidentifyTemplateRequest):
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_deidentify_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate(
+ name='name_value',
+ display_name='display_name_value',
+ description='description_value',
+ ))
+ response = await client.get_deidentify_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.GetDeidentifyTemplateRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.DeidentifyTemplate)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+
+
+ @pytest.mark.asyncio
+ async def test_get_deidentify_template_async_from_dict():
+ await test_get_deidentify_template_async(request_type=dict)
+
+
+ def test_get_deidentify_template_field_headers():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.GetDeidentifyTemplateRequest()
+
+ request.name = 'name/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_deidentify_template),
+ '__call__') as call:
+ call.return_value = dlp.DeidentifyTemplate()
+ client.get_deidentify_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
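+ # Each entry of mock_calls unpacks as (name, args, kwargs), hence the
+ # '_, args, _' pattern used throughout this module.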
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name/value',
+ ) in kw['metadata']
+
+
+ @pytest.mark.asyncio
+ async def test_get_deidentify_template_field_headers_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.GetDeidentifyTemplateRequest()
+
+ request.name = 'name/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_deidentify_template),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate())
+ await client.get_deidentify_template(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name/value',
+ ) in kw['metadata']
+
+
+ def test_get_deidentify_template_flattened():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_deidentify_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.DeidentifyTemplate()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_deidentify_template(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == 'name_value'
+
+
+ def test_get_deidentify_template_flattened_error():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_deidentify_template(
+ dlp.GetDeidentifyTemplateRequest(),
+ name='name_value',
+ )
+
+
+ @pytest.mark.asyncio
+ async def test_get_deidentify_template_flattened_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_deidentify_template),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_deidentify_template(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == 'name_value'
+
+
+ @pytest.mark.asyncio
+ async def test_get_deidentify_template_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_deidentify_template(
+ dlp.GetDeidentifyTemplateRequest(),
+ name='name_value',
+ )
+
+
+ def test_list_deidentify_templates(transport: str = 'grpc', request_type=dlp.ListDeidentifyTemplatesRequest):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_deidentify_templates),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.ListDeidentifyTemplatesResponse(
+ next_page_token='next_page_token_value',
+ )
+ response = client.list_deidentify_templates(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.ListDeidentifyTemplatesRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListDeidentifyTemplatesPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+ def test_list_deidentify_templates_from_dict():
+ test_list_deidentify_templates(request_type=dict)
+
+
+ def test_list_deidentify_templates_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_deidentify_templates),
+ '__call__') as call:
+ client.list_deidentify_templates()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.ListDeidentifyTemplatesRequest()
+
+
+ @pytest.mark.asyncio
+ async def test_list_deidentify_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDeidentifyTemplatesRequest):
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_deidentify_templates),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_deidentify_templates(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.ListDeidentifyTemplatesRequest()
+
+ # Establish that the response is the type that we expect.
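+ # The async surface wraps the first response in an AsyncPager;
+ # attributes such as next_page_token proxy through to the underlying
+ # raw response (a reading of the generated pagers, not asserted here).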
+ assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_async_from_dict(): + await test_list_deidentify_templates_async(request_type=dict) + + +def test_list_deidentify_templates_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + call.return_value = dlp.ListDeidentifyTemplatesResponse() + client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_deidentify_templates_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListDeidentifyTemplatesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) + await client.list_deidentify_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_deidentify_templates_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deidentify_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDeidentifyTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_deidentify_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_deidentify_templates_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ client.list_deidentify_templates(
+ dlp.ListDeidentifyTemplatesRequest(),
+ parent='parent_value',
+ )
+
+
+ @pytest.mark.asyncio
+ async def test_list_deidentify_templates_flattened_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_deidentify_templates),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.ListDeidentifyTemplatesResponse()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_deidentify_templates(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].parent == 'parent_value'
+
+
+ @pytest.mark.asyncio
+ async def test_list_deidentify_templates_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_deidentify_templates(
+ dlp.ListDeidentifyTemplatesRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_deidentify_templates_pager():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_deidentify_templates),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ dlp.ListDeidentifyTemplatesResponse(
+ deidentify_templates=[
+ dlp.DeidentifyTemplate(),
+ dlp.DeidentifyTemplate(),
+ dlp.DeidentifyTemplate(),
+ ],
+ next_page_token='abc',
+ ),
+ dlp.ListDeidentifyTemplatesResponse(
+ deidentify_templates=[],
+ next_page_token='def',
+ ),
+ dlp.ListDeidentifyTemplatesResponse(
+ deidentify_templates=[
+ dlp.DeidentifyTemplate(),
+ ],
+ next_page_token='ghi',
+ ),
+ dlp.ListDeidentifyTemplatesResponse(
+ deidentify_templates=[
+ dlp.DeidentifyTemplate(),
+ dlp.DeidentifyTemplate(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_deidentify_templates(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(isinstance(i, dlp.DeidentifyTemplate)
+ for i in results)
+
+def test_list_deidentify_templates_pages():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_deidentify_templates),
+ '__call__') as call:
+ # Set the response to a series of pages.
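+ # Each call to the mocked stub consumes the next side_effect entry; the
+ # trailing RuntimeError would surface if the pager fetched past the end.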
+ call.side_effect = (
+ dlp.ListDeidentifyTemplatesResponse(
+ deidentify_templates=[
+ dlp.DeidentifyTemplate(),
+ dlp.DeidentifyTemplate(),
+ dlp.DeidentifyTemplate(),
+ ],
+ next_page_token='abc',
+ ),
+ dlp.ListDeidentifyTemplatesResponse(
+ deidentify_templates=[],
+ next_page_token='def',
+ ),
+ dlp.ListDeidentifyTemplatesResponse(
+ deidentify_templates=[
+ dlp.DeidentifyTemplate(),
+ ],
+ next_page_token='ghi',
+ ),
+ dlp.ListDeidentifyTemplatesResponse(
+ deidentify_templates=[
+ dlp.DeidentifyTemplate(),
+ dlp.DeidentifyTemplate(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_deidentify_templates(request={}).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_deidentify_templates_async_pager():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_deidentify_templates),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ dlp.ListDeidentifyTemplatesResponse(
+ deidentify_templates=[
+ dlp.DeidentifyTemplate(),
+ dlp.DeidentifyTemplate(),
+ dlp.DeidentifyTemplate(),
+ ],
+ next_page_token='abc',
+ ),
+ dlp.ListDeidentifyTemplatesResponse(
+ deidentify_templates=[],
+ next_page_token='def',
+ ),
+ dlp.ListDeidentifyTemplatesResponse(
+ deidentify_templates=[
+ dlp.DeidentifyTemplate(),
+ ],
+ next_page_token='ghi',
+ ),
+ dlp.ListDeidentifyTemplatesResponse(
+ deidentify_templates=[
+ dlp.DeidentifyTemplate(),
+ dlp.DeidentifyTemplate(),
+ ],
+ ),
+ RuntimeError,
+ )
+ async_pager = await client.list_deidentify_templates(request={},)
+ assert async_pager.next_page_token == 'abc'
+ responses = []
+ async for response in async_pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, dlp.DeidentifyTemplate)
+ for i in responses)
+
+@pytest.mark.asyncio
+async def test_list_deidentify_templates_async_pages():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_deidentify_templates),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
+ call.side_effect = ( + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + next_page_token='abc', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[], + next_page_token='def', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + ], + next_page_token='ghi', + ), + dlp.ListDeidentifyTemplatesResponse( + deidentify_templates=[ + dlp.DeidentifyTemplate(), + dlp.DeidentifyTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_deidentify_templates(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_delete_deidentify_template(transport: str = 'grpc', request_type=dlp.DeleteDeidentifyTemplateRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_deidentify_template_from_dict(): + test_delete_deidentify_template(request_type=dict) + + +def test_delete_deidentify_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + client.delete_deidentify_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDeidentifyTemplateRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. 
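+ # (The mocked __call__ records the invocation synchronously; awaiting the
+ # returned FakeUnaryUnaryCall(None) then resolves to None, matching the
+ # Empty response of this RPC.)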
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDeidentifyTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_async_from_dict(): + await test_delete_deidentify_template_async(request_type=dict) + + +def test_delete_deidentify_template_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + call.return_value = None + client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteDeidentifyTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_deidentify_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_deidentify_template_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_deidentify_template_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deidentify_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_deidentify_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_deidentify_template_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_deidentify_template( + dlp.DeleteDeidentifyTemplateRequest(), + name='name_value', + ) + + +def test_create_job_trigger(transport: str = 'grpc', request_type=dlp.CreateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + response = client.create_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.JobTrigger) + assert response.name == 'name_value' + assert response.display_name == 'display_name_value' + assert response.description == 'description_value' + assert response.status == dlp.JobTrigger.Status.HEALTHY + + +def test_create_job_trigger_from_dict(): + test_create_job_trigger(request_type=dict) + + +def test_create_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(
+ type(client.transport.create_job_trigger),
+ '__call__') as call:
+ client.create_job_trigger()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.CreateJobTriggerRequest()
+
+
+ @pytest.mark.asyncio
+ async def test_create_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateJobTriggerRequest):
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_job_trigger),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger(
+ name='name_value',
+ display_name='display_name_value',
+ description='description_value',
+ status=dlp.JobTrigger.Status.HEALTHY,
+ ))
+ response = await client.create_job_trigger(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.CreateJobTriggerRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.JobTrigger)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+ assert response.status == dlp.JobTrigger.Status.HEALTHY
+
+
+ @pytest.mark.asyncio
+ async def test_create_job_trigger_async_from_dict():
+ await test_create_job_trigger_async(request_type=dict)
+
+
+ def test_create_job_trigger_field_headers():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.CreateJobTriggerRequest()
+
+ request.parent = 'parent/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_job_trigger),
+ '__call__') as call:
+ call.return_value = dlp.JobTrigger()
+ client.create_job_trigger(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'parent=parent/value',
+ ) in kw['metadata']
+
+
+ @pytest.mark.asyncio
+ async def test_create_job_trigger_field_headers_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.CreateJobTriggerRequest()
+
+ request.parent = 'parent/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.create_job_trigger),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger())
+ await client.create_job_trigger(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
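+ # kw['metadata'] holds the outgoing gRPC metadata; the x-goog-request-params
+ # entry is the routing header derived from request.parent.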
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job_trigger( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].job_trigger == dlp.JobTrigger(name='name_value') + + +def test_create_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_job_trigger( + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].job_trigger == dlp.JobTrigger(name='name_value') + + +@pytest.mark.asyncio +async def test_create_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_job_trigger( + dlp.CreateJobTriggerRequest(), + parent='parent_value', + job_trigger=dlp.JobTrigger(name='name_value'), + ) + + +def test_update_job_trigger(transport: str = 'grpc', request_type=dlp.UpdateJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
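+ # The scalar fields set here are asserted on below; inspect_job is
+ # populated as well, though the assertions cover only the scalar fields.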
+ call.return_value = dlp.JobTrigger(
+ name='name_value',
+ display_name='display_name_value',
+ description='description_value',
+ status=dlp.JobTrigger.Status.HEALTHY,
+ inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))),
+ )
+ response = client.update_job_trigger(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.UpdateJobTriggerRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.JobTrigger)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+ assert response.status == dlp.JobTrigger.Status.HEALTHY
+
+
+ def test_update_job_trigger_from_dict():
+ test_update_job_trigger(request_type=dict)
+
+
+ def test_update_job_trigger_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_job_trigger),
+ '__call__') as call:
+ client.update_job_trigger()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.UpdateJobTriggerRequest()
+
+
+ @pytest.mark.asyncio
+ async def test_update_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateJobTriggerRequest):
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.update_job_trigger),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger(
+ name='name_value',
+ display_name='display_name_value',
+ description='description_value',
+ status=dlp.JobTrigger.Status.HEALTHY,
+ ))
+ response = await client.update_job_trigger(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.UpdateJobTriggerRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.JobTrigger)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+ assert response.status == dlp.JobTrigger.Status.HEALTHY
+
+
+ @pytest.mark.asyncio
+ async def test_update_job_trigger_async_from_dict():
+ await test_update_job_trigger_async(request_type=dict)
+
+
+ def test_update_job_trigger_field_headers():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.UpdateJobTriggerRequest()
+
+ request.name = 'name/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + call.return_value = dlp.JobTrigger() + client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_update_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.UpdateJobTriggerRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.update_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_update_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_job_trigger( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].job_trigger == dlp.JobTrigger(name='name_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +def test_update_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. 
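+ # The first return value below is immediately overridden with an awaitable
+ # FakeUnaryUnaryCall so the async client can await the mocked RPC.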
+ call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_job_trigger( + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].job_trigger == dlp.JobTrigger(name='name_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +@pytest.mark.asyncio +async def test_update_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_job_trigger( + dlp.UpdateJobTriggerRequest(), + name='name_value', + job_trigger=dlp.JobTrigger(name='name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_hybrid_inspect_job_trigger(transport: str = 'grpc', request_type=dlp.HybridInspectJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse( + ) + response = client.hybrid_inspect_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.HybridInspectResponse) + + +def test_hybrid_inspect_job_trigger_from_dict(): + test_hybrid_inspect_job_trigger(request_type=dict) + + +def test_hybrid_inspect_job_trigger_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + client.hybrid_inspect_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.HybridInspectJobTriggerRequest() + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.hybrid_inspect_job_trigger),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse(
+ ))
+ response = await client.hybrid_inspect_job_trigger(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.HybridInspectJobTriggerRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.HybridInspectResponse)
+
+
+ @pytest.mark.asyncio
+ async def test_hybrid_inspect_job_trigger_async_from_dict():
+ await test_hybrid_inspect_job_trigger_async(request_type=dict)
+
+
+ def test_hybrid_inspect_job_trigger_field_headers():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.HybridInspectJobTriggerRequest()
+
+ request.name = 'name/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.hybrid_inspect_job_trigger),
+ '__call__') as call:
+ call.return_value = dlp.HybridInspectResponse()
+ client.hybrid_inspect_job_trigger(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name/value',
+ ) in kw['metadata']
+
+
+ @pytest.mark.asyncio
+ async def test_hybrid_inspect_job_trigger_field_headers_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.HybridInspectJobTriggerRequest()
+
+ request.name = 'name/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.hybrid_inspect_job_trigger),
+ '__call__') as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse())
+ await client.hybrid_inspect_job_trigger(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert (
+ 'x-goog-request-params',
+ 'name=name/value',
+ ) in kw['metadata']
+
+
+ def test_hybrid_inspect_job_trigger_flattened():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.hybrid_inspect_job_trigger),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.HybridInspectResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.hybrid_inspect_job_trigger(
+ name='name_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_hybrid_inspect_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.hybrid_inspect_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.HybridInspectResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.hybrid_inspect_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_hybrid_inspect_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.hybrid_inspect_job_trigger( + dlp.HybridInspectJobTriggerRequest(), + name='name_value', + ) + + +def test_get_job_trigger(transport: str = 'grpc', request_type=dlp.GetJobTriggerRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger( + name='name_value', + display_name='display_name_value', + description='description_value', + status=dlp.JobTrigger.Status.HEALTHY, + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + ) + response = client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetJobTriggerRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dlp.JobTrigger)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+ assert response.status == dlp.JobTrigger.Status.HEALTHY
+
+
+ def test_get_job_trigger_from_dict():
+ test_get_job_trigger(request_type=dict)
+
+
+ def test_get_job_trigger_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_job_trigger),
+ '__call__') as call:
+ client.get_job_trigger()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.GetJobTriggerRequest()
+
+
+ @pytest.mark.asyncio
+ async def test_get_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.GetJobTriggerRequest):
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_job_trigger),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger(
+ name='name_value',
+ display_name='display_name_value',
+ description='description_value',
+ status=dlp.JobTrigger.Status.HEALTHY,
+ ))
+ response = await client.get_job_trigger(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.GetJobTriggerRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, dlp.JobTrigger)
+ assert response.name == 'name_value'
+ assert response.display_name == 'display_name_value'
+ assert response.description == 'description_value'
+ assert response.status == dlp.JobTrigger.Status.HEALTHY
+
+
+ @pytest.mark.asyncio
+ async def test_get_job_trigger_async_from_dict():
+ await test_get_job_trigger_async(request_type=dict)
+
+
+ def test_get_job_trigger_field_headers():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.GetJobTriggerRequest()
+
+ request.name = 'name/value'
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_job_trigger),
+ '__call__') as call:
+ call.return_value = dlp.JobTrigger()
+ client.get_job_trigger(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.GetJobTriggerRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + await client.get_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_job_trigger_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job_trigger( + dlp.GetJobTriggerRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.JobTrigger() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_job_trigger( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_job_trigger_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError):
+ await client.get_job_trigger(
+ dlp.GetJobTriggerRequest(),
+ name='name_value',
+ )
+
+
+ def test_list_job_triggers(transport: str = 'grpc', request_type=dlp.ListJobTriggersRequest):
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_job_triggers),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = dlp.ListJobTriggersResponse(
+ next_page_token='next_page_token_value',
+ )
+ response = client.list_job_triggers(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.ListJobTriggersRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListJobTriggersPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+ def test_list_job_triggers_from_dict():
+ test_list_job_triggers(request_type=dict)
+
+
+ def test_list_job_triggers_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport='grpc',
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_job_triggers),
+ '__call__') as call:
+ client.list_job_triggers()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.ListJobTriggersRequest()
+
+
+ @pytest.mark.asyncio
+ async def test_list_job_triggers_async(transport: str = 'grpc_asyncio', request_type=dlp.ListJobTriggersRequest):
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_job_triggers),
+ '__call__') as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse(
+ next_page_token='next_page_token_value',
+ ))
+ response = await client.list_job_triggers(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == dlp.ListJobTriggersRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListJobTriggersAsyncPager)
+ assert response.next_page_token == 'next_page_token_value'
+
+
+ @pytest.mark.asyncio
+ async def test_list_job_triggers_async_from_dict():
+ await test_list_job_triggers_async(request_type=dict)
+
+
+ def test_list_job_triggers_field_headers():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = dlp.ListJobTriggersRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + call.return_value = dlp.ListJobTriggersResponse() + client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_job_triggers_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ListJobTriggersRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) + await client.list_job_triggers(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_job_triggers_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_job_triggers( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_job_triggers_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_triggers( + dlp.ListJobTriggersRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_job_triggers_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_triggers), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListJobTriggersResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_job_triggers(
+ parent='parent_value',
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].parent == 'parent_value'
+
+
+ @pytest.mark.asyncio
+ async def test_list_job_triggers_flattened_error_async():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_job_triggers(
+ dlp.ListJobTriggersRequest(),
+ parent='parent_value',
+ )
+
+
+def test_list_job_triggers_pager():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_job_triggers),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ dlp.ListJobTriggersResponse(
+ job_triggers=[
+ dlp.JobTrigger(),
+ dlp.JobTrigger(),
+ dlp.JobTrigger(),
+ ],
+ next_page_token='abc',
+ ),
+ dlp.ListJobTriggersResponse(
+ job_triggers=[],
+ next_page_token='def',
+ ),
+ dlp.ListJobTriggersResponse(
+ job_triggers=[
+ dlp.JobTrigger(),
+ ],
+ next_page_token='ghi',
+ ),
+ dlp.ListJobTriggersResponse(
+ job_triggers=[
+ dlp.JobTrigger(),
+ dlp.JobTrigger(),
+ ],
+ ),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((
+ ('parent', ''),
+ )),
+ )
+ pager = client.list_job_triggers(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(isinstance(i, dlp.JobTrigger)
+ for i in results)
+
+def test_list_job_triggers_pages():
+ client = DlpServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_job_triggers),
+ '__call__') as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ dlp.ListJobTriggersResponse(
+ job_triggers=[
+ dlp.JobTrigger(),
+ dlp.JobTrigger(),
+ dlp.JobTrigger(),
+ ],
+ next_page_token='abc',
+ ),
+ dlp.ListJobTriggersResponse(
+ job_triggers=[],
+ next_page_token='def',
+ ),
+ dlp.ListJobTriggersResponse(
+ job_triggers=[
+ dlp.JobTrigger(),
+ ],
+ next_page_token='ghi',
+ ),
+ dlp.ListJobTriggersResponse(
+ job_triggers=[
+ dlp.JobTrigger(),
+ dlp.JobTrigger(),
+ ],
+ ),
+ RuntimeError,
+ )
+ pages = list(client.list_job_triggers(request={}).pages)
+ for page_, token in zip(pages, ['abc','def','ghi', '']):
+ assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_job_triggers_async_pager():
+ client = DlpServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_job_triggers),
+ '__call__', new_callable=mock.AsyncMock) as call:
+ # Set the response to a series of pages.
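+ # With new_callable=mock.AsyncMock the patched __call__ already returns
+ # awaitables, so no FakeUnaryUnaryCall wrapper is needed for these pages.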
+        call.side_effect = (
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[],
+                next_page_token='def',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_job_triggers(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dlp.JobTrigger)
+                   for i in responses)
+
+@pytest.mark.asyncio
+async def test_list_job_triggers_async_pages():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_job_triggers),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[],
+                next_page_token='def',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListJobTriggersResponse(
+                job_triggers=[
+                    dlp.JobTrigger(),
+                    dlp.JobTrigger(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_job_triggers(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+def test_delete_job_trigger(transport: str = 'grpc', request_type=dlp.DeleteJobTriggerRequest):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_job_trigger),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.delete_job_trigger(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.DeleteJobTriggerRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_delete_job_trigger_from_dict():
+    test_delete_job_trigger(request_type=dict)
+
+
+def test_delete_job_trigger_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
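+    # An argument-less call should still synthesize a default request
+    # proto rather than passing None through to the stub.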
+ with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + client.delete_job_trigger() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + + +@pytest.mark.asyncio +async def test_delete_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteJobTriggerRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteJobTriggerRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_trigger_async_from_dict(): + await test_delete_job_trigger_async(request_type=dict) + + +def test_delete_job_trigger_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + call.return_value = None + client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteJobTriggerRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_job_trigger_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.delete_job_trigger),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_job_trigger(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+def test_delete_job_trigger_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_job_trigger(
+            dlp.DeleteJobTriggerRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_job_trigger_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_job_trigger),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_job_trigger(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_delete_job_trigger_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_job_trigger(
+            dlp.DeleteJobTriggerRequest(),
+            name='name_value',
+        )
+
+
+def test_activate_job_trigger(transport: str = 'grpc', request_type=dlp.ActivateJobTriggerRequest):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.activate_job_trigger),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.DlpJob(
+            name='name_value',
+            type_=dlp.DlpJobType.INSPECT_JOB,
+            state=dlp.DlpJob.JobState.PENDING,
+            job_trigger_name='job_trigger_name_value',
+            risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))),
+        )
+        response = client.activate_job_trigger(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ActivateJobTriggerRequest()
+
+    # Establish that the response is the type that we expect.
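+    # Only the scalar fields are re-asserted below; the nested
+    # risk_details message set on the mock is not individually checked.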
+    assert isinstance(response, dlp.DlpJob)
+    assert response.name == 'name_value'
+    assert response.type_ == dlp.DlpJobType.INSPECT_JOB
+    assert response.state == dlp.DlpJob.JobState.PENDING
+    assert response.job_trigger_name == 'job_trigger_name_value'
+
+
+def test_activate_job_trigger_from_dict():
+    test_activate_job_trigger(request_type=dict)
+
+
+def test_activate_job_trigger_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.activate_job_trigger),
+            '__call__') as call:
+        client.activate_job_trigger()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ActivateJobTriggerRequest()
+
+
+@pytest.mark.asyncio
+async def test_activate_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.ActivateJobTriggerRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.activate_job_trigger),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob(
+            name='name_value',
+            type_=dlp.DlpJobType.INSPECT_JOB,
+            state=dlp.DlpJob.JobState.PENDING,
+            job_trigger_name='job_trigger_name_value',
+        ))
+        response = await client.activate_job_trigger(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ActivateJobTriggerRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.DlpJob)
+    assert response.name == 'name_value'
+    assert response.type_ == dlp.DlpJobType.INSPECT_JOB
+    assert response.state == dlp.DlpJob.JobState.PENDING
+    assert response.job_trigger_name == 'job_trigger_name_value'
+
+
+@pytest.mark.asyncio
+async def test_activate_job_trigger_async_from_dict():
+    await test_activate_job_trigger_async(request_type=dict)
+
+
+def test_activate_job_trigger_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.ActivateJobTriggerRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.activate_job_trigger),
+            '__call__') as call:
+        call.return_value = dlp.DlpJob()
+        client.activate_job_trigger(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
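+    # kw captures the keyword arguments of the mocked stub call; the
+    # routing header travels in kw['metadata'] as a (key, value) pair.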
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_activate_job_trigger_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.ActivateJobTriggerRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.activate_job_trigger), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) + await client.activate_job_trigger(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_create_dlp_job(transport: str = 'grpc', request_type=dlp.CreateDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.DlpJob( + name='name_value', + type_=dlp.DlpJobType.INSPECT_JOB, + state=dlp.DlpJob.JobState.PENDING, + job_trigger_name='job_trigger_name_value', + risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + response = client.create_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateDlpJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.DlpJob) + assert response.name == 'name_value' + assert response.type_ == dlp.DlpJobType.INSPECT_JOB + assert response.state == dlp.DlpJob.JobState.PENDING + assert response.job_trigger_name == 'job_trigger_name_value' + + +def test_create_dlp_job_from_dict(): + test_create_dlp_job(request_type=dict) + + +def test_create_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.create_dlp_job),
+            '__call__') as call:
+        client.create_dlp_job()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.CreateDlpJobRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDlpJobRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob(
+            name='name_value',
+            type_=dlp.DlpJobType.INSPECT_JOB,
+            state=dlp.DlpJob.JobState.PENDING,
+            job_trigger_name='job_trigger_name_value',
+        ))
+        response = await client.create_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.CreateDlpJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.DlpJob)
+    assert response.name == 'name_value'
+    assert response.type_ == dlp.DlpJobType.INSPECT_JOB
+    assert response.state == dlp.DlpJob.JobState.PENDING
+    assert response.job_trigger_name == 'job_trigger_name_value'
+
+
+@pytest.mark.asyncio
+async def test_create_dlp_job_async_from_dict():
+    await test_create_dlp_job_async(request_type=dict)
+
+
+def test_create_dlp_job_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.CreateDlpJobRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_dlp_job),
+            '__call__') as call:
+        call.return_value = dlp.DlpJob()
+        client.create_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_dlp_job_field_headers_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.CreateDlpJobRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_dlp_job),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob())
+        await client.create_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_create_dlp_job_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.DlpJob()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_dlp_job(
+            parent='parent_value',
+            inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))),
+            risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+        assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value'))))
+
+
+def test_create_dlp_job_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_dlp_job(
+            dlp.CreateDlpJobRequest(),
+            parent='parent_value',
+            inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))),
+            risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_dlp_job_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_dlp_job(
+            parent='parent_value',
+            inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))),
+            risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
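+        # args[0] is the request proto assembled from the flattened
+        # kwargs; a scalar field and one oneof message are spot-checked.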
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) + + +@pytest.mark.asyncio +async def test_create_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_dlp_job( + dlp.CreateDlpJobRequest(), + parent='parent_value', + inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), + risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), + ) + + +def test_list_dlp_jobs(transport: str = 'grpc', request_type=dlp.ListDlpJobsRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.ListDlpJobsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_dlp_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDlpJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_dlp_jobs_from_dict(): + test_list_dlp_jobs(request_type=dict) + + +def test_list_dlp_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + client.list_dlp_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.ListDlpJobsRequest() + + +@pytest.mark.asyncio +async def test_list_dlp_jobs_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDlpJobsRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_dlp_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. 
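+        # FakeUnaryUnaryCall wraps the response so that awaiting the
+        # mocked stub behaves like a grpc.aio unary-unary call object.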
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_dlp_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListDlpJobsRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListDlpJobsAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_dlp_jobs_async_from_dict():
+    await test_list_dlp_jobs_async(request_type=dict)
+
+
+def test_list_dlp_jobs_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.ListDlpJobsRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_dlp_jobs),
+            '__call__') as call:
+        call.return_value = dlp.ListDlpJobsResponse()
+        client.list_dlp_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_dlp_jobs_field_headers_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.ListDlpJobsRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_dlp_jobs),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse())
+        await client.list_dlp_jobs(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_list_dlp_jobs_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_dlp_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.ListDlpJobsResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_dlp_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+def test_list_dlp_jobs_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_dlp_jobs(
+            dlp.ListDlpJobsRequest(),
+            parent='parent_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_dlp_jobs_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_dlp_jobs),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_dlp_jobs(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_list_dlp_jobs_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_dlp_jobs(
+            dlp.ListDlpJobsRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_dlp_jobs_pager():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_dlp_jobs),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListDlpJobsResponse(
+                jobs=[
+                    dlp.DlpJob(),
+                    dlp.DlpJob(),
+                    dlp.DlpJob(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListDlpJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            dlp.ListDlpJobsResponse(
+                jobs=[
+                    dlp.DlpJob(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListDlpJobsResponse(
+                jobs=[
+                    dlp.DlpJob(),
+                    dlp.DlpJob(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_dlp_jobs(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, dlp.DlpJob)
+                   for i in results)
+
+def test_list_dlp_jobs_pages():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_dlp_jobs),
+            '__call__') as call:
+        # Set the response to a series of pages.
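+        # Each response below becomes one page; iterating .pages stops
+        # once a response carries an empty next_page_token.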
+        call.side_effect = (
+            dlp.ListDlpJobsResponse(
+                jobs=[
+                    dlp.DlpJob(),
+                    dlp.DlpJob(),
+                    dlp.DlpJob(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListDlpJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            dlp.ListDlpJobsResponse(
+                jobs=[
+                    dlp.DlpJob(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListDlpJobsResponse(
+                jobs=[
+                    dlp.DlpJob(),
+                    dlp.DlpJob(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_dlp_jobs(request={}).pages)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_dlp_jobs_async_pager():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_dlp_jobs),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListDlpJobsResponse(
+                jobs=[
+                    dlp.DlpJob(),
+                    dlp.DlpJob(),
+                    dlp.DlpJob(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListDlpJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            dlp.ListDlpJobsResponse(
+                jobs=[
+                    dlp.DlpJob(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListDlpJobsResponse(
+                jobs=[
+                    dlp.DlpJob(),
+                    dlp.DlpJob(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_dlp_jobs(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dlp.DlpJob)
+                   for i in responses)
+
+@pytest.mark.asyncio
+async def test_list_dlp_jobs_async_pages():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_dlp_jobs),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListDlpJobsResponse(
+                jobs=[
+                    dlp.DlpJob(),
+                    dlp.DlpJob(),
+                    dlp.DlpJob(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListDlpJobsResponse(
+                jobs=[],
+                next_page_token='def',
+            ),
+            dlp.ListDlpJobsResponse(
+                jobs=[
+                    dlp.DlpJob(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListDlpJobsResponse(
+                jobs=[
+                    dlp.DlpJob(),
+                    dlp.DlpJob(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_dlp_jobs(request={})).pages:
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+def test_get_dlp_job(transport: str = 'grpc', request_type=dlp.GetDlpJobRequest):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.DlpJob(
+            name='name_value',
+            type_=dlp.DlpJobType.INSPECT_JOB,
+            state=dlp.DlpJob.JobState.PENDING,
+            job_trigger_name='job_trigger_name_value',
+            risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))),
+        )
+        response = client.get_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.GetDlpJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.DlpJob)
+    assert response.name == 'name_value'
+    assert response.type_ == dlp.DlpJobType.INSPECT_JOB
+    assert response.state == dlp.DlpJob.JobState.PENDING
+    assert response.job_trigger_name == 'job_trigger_name_value'
+
+
+def test_get_dlp_job_from_dict():
+    test_get_dlp_job(request_type=dict)
+
+
+def test_get_dlp_job_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_dlp_job),
+            '__call__') as call:
+        client.get_dlp_job()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.GetDlpJobRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDlpJobRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob(
+            name='name_value',
+            type_=dlp.DlpJobType.INSPECT_JOB,
+            state=dlp.DlpJob.JobState.PENDING,
+            job_trigger_name='job_trigger_name_value',
+        ))
+        response = await client.get_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.GetDlpJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.DlpJob)
+    assert response.name == 'name_value'
+    assert response.type_ == dlp.DlpJobType.INSPECT_JOB
+    assert response.state == dlp.DlpJob.JobState.PENDING
+    assert response.job_trigger_name == 'job_trigger_name_value'
+
+
+@pytest.mark.asyncio
+async def test_get_dlp_job_async_from_dict():
+    await test_get_dlp_job_async(request_type=dict)
+
+
+def test_get_dlp_job_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.GetDlpJobRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_dlp_job),
+            '__call__') as call:
+        call.return_value = dlp.DlpJob()
+        client.get_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_dlp_job_field_headers_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.GetDlpJobRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_dlp_job),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob())
+        await client.get_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+def test_get_dlp_job_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.DlpJob()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_dlp_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+def test_get_dlp_job_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_dlp_job(
+            dlp.GetDlpJobRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_dlp_job_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_dlp_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_dlp_job( + dlp.GetDlpJobRequest(), + name='name_value', + ) + + +def test_delete_dlp_job(transport: str = 'grpc', request_type=dlp.DeleteDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_dlp_job_from_dict(): + test_delete_dlp_job(request_type=dict) + + +def test_delete_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + client.delete_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + + +@pytest.mark.asyncio +async def test_delete_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_dlp_job_async_from_dict(): + await test_delete_dlp_job_async(request_type=dict) + + +def test_delete_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
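+    # gRPC has no request URI to carry the resource path, so the client
+    # is expected to mirror it into x-goog-request-params metadata.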
+    request = dlp.DeleteDlpJobRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_dlp_job),
+            '__call__') as call:
+        call.return_value = None
+        client.delete_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_delete_dlp_job_field_headers_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.DeleteDlpJobRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_dlp_job),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        await client.delete_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+def test_delete_dlp_job_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_dlp_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+def test_delete_dlp_job_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_dlp_job(
+            dlp.DeleteDlpJobRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_dlp_job_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_dlp_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_dlp_job_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_dlp_job( + dlp.DeleteDlpJobRequest(), + name='name_value', + ) + + +def test_cancel_dlp_job(transport: str = 'grpc', request_type=dlp.CancelDlpJobRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_dlp_job_from_dict(): + test_cancel_dlp_job(request_type=dict) + + +def test_cancel_dlp_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + client.cancel_dlp_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CancelDlpJobRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CancelDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_async_from_dict(): + await test_cancel_dlp_job_async(request_type=dict) + + +def test_cancel_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dlp.CancelDlpJobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + call.return_value = None + client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_cancel_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.CancelDlpJobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.cancel_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_create_stored_info_type(transport: str = 'grpc', request_type=dlp.CreateStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType( + name='name_value', + ) + response = client.create_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.CreateStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_create_stored_info_type_from_dict(): + test_create_stored_info_type(request_type=dict) + + +def test_create_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
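+    # As above, the empty call should default-construct the request
+    # proto; call.assert_called() plus the args check verify that.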
+    with mock.patch.object(
+            type(client.transport.create_stored_info_type),
+            '__call__') as call:
+        client.create_stored_info_type()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.CreateStoredInfoTypeRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateStoredInfoTypeRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_stored_info_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType(
+            name='name_value',
+        ))
+        response = await client.create_stored_info_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.CreateStoredInfoTypeRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.StoredInfoType)
+    assert response.name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_create_stored_info_type_async_from_dict():
+    await test_create_stored_info_type_async(request_type=dict)
+
+
+def test_create_stored_info_type_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.CreateStoredInfoTypeRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_stored_info_type),
+            '__call__') as call:
+        call.return_value = dlp.StoredInfoType()
+        client.create_stored_info_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_create_stored_info_type_field_headers_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.CreateStoredInfoTypeRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_stored_info_type),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
+        await client.create_stored_info_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_create_stored_info_type_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_stored_info_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.StoredInfoType()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.create_stored_info_type(
+            parent='parent_value',
+            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+        assert args[0].config == dlp.StoredInfoTypeConfig(display_name='display_name_value')
+
+
+def test_create_stored_info_type_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.create_stored_info_type(
+            dlp.CreateStoredInfoTypeRequest(),
+            parent='parent_value',
+            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
+        )
+
+
+@pytest.mark.asyncio
+async def test_create_stored_info_type_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.create_stored_info_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.create_stored_info_type(
+            parent='parent_value',
+            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+        assert args[0].config == dlp.StoredInfoTypeConfig(display_name='display_name_value')
+
+
+@pytest.mark.asyncio
+async def test_create_stored_info_type_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.create_stored_info_type(
+            dlp.CreateStoredInfoTypeRequest(),
+            parent='parent_value',
+            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
+        )
+
+
+def test_update_stored_info_type(transport: str = 'grpc', request_type=dlp.UpdateStoredInfoTypeRequest):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_stored_info_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.StoredInfoType(
+            name='name_value',
+        )
+        response = client.update_stored_info_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.UpdateStoredInfoTypeRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.StoredInfoType)
+    assert response.name == 'name_value'
+
+
+def test_update_stored_info_type_from_dict():
+    test_update_stored_info_type(request_type=dict)
+
+
+def test_update_stored_info_type_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_stored_info_type),
+            '__call__') as call:
+        client.update_stored_info_type()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.UpdateStoredInfoTypeRequest()
+
+
+@pytest.mark.asyncio
+async def test_update_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateStoredInfoTypeRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_stored_info_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType(
+            name='name_value',
+        ))
+        response = await client.update_stored_info_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.UpdateStoredInfoTypeRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.StoredInfoType)
+    assert response.name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_update_stored_info_type_async_from_dict():
+    await test_update_stored_info_type_async(request_type=dict)
+
+
+def test_update_stored_info_type_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.UpdateStoredInfoTypeRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_stored_info_type),
+            '__call__') as call:
+        call.return_value = dlp.StoredInfoType()
+        client.update_stored_info_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_update_stored_info_type_field_headers_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.UpdateStoredInfoTypeRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_stored_info_type),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
+        await client.update_stored_info_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+def test_update_stored_info_type_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_stored_info_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.StoredInfoType()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.update_stored_info_type(
+            name='name_value',
+            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+        assert args[0].config == dlp.StoredInfoTypeConfig(display_name='display_name_value')
+        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
+
+
+def test_update_stored_info_type_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.update_stored_info_type(
+            dlp.UpdateStoredInfoTypeRequest(),
+            name='name_value',
+            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
+            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
+        )
+
+
+@pytest.mark.asyncio
+async def test_update_stored_info_type_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.update_stored_info_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.update_stored_info_type( + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + assert args[0].config == dlp.StoredInfoTypeConfig(display_name='display_name_value') + assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) + + +@pytest.mark.asyncio +async def test_update_stored_info_type_flattened_error_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_stored_info_type( + dlp.UpdateStoredInfoTypeRequest(), + name='name_value', + config=dlp.StoredInfoTypeConfig(display_name='display_name_value'), + update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), + ) + + +def test_get_stored_info_type(transport: str = 'grpc', request_type=dlp.GetStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = dlp.StoredInfoType( + name='name_value', + ) + response = client.get_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, dlp.StoredInfoType) + assert response.name == 'name_value' + + +def test_get_stored_info_type_from_dict(): + test_get_stored_info_type(request_type=dict) + + +def test_get_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + client.get_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.GetStoredInfoTypeRequest() + + +@pytest.mark.asyncio +async def test_get_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.GetStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. 
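+        # FakeUnaryUnaryCall wraps the response so that it can be awaited,
+        # mirroring the call object a real grpc.aio unary-unary stub returns.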
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType(
+            name='name_value',
+        ))
+        response = await client.get_stored_info_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.GetStoredInfoTypeRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.StoredInfoType)
+    assert response.name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_get_stored_info_type_async_from_dict():
+    await test_get_stored_info_type_async(request_type=dict)
+
+
+def test_get_stored_info_type_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.GetStoredInfoTypeRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_stored_info_type),
+            '__call__') as call:
+        call.return_value = dlp.StoredInfoType()
+        client.get_stored_info_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_get_stored_info_type_field_headers_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.GetStoredInfoTypeRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_stored_info_type),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
+        await client.get_stored_info_type(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+def test_get_stored_info_type_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_stored_info_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.StoredInfoType()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_stored_info_type(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+def test_get_stored_info_type_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_stored_info_type(
+            dlp.GetStoredInfoTypeRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_stored_info_type_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_stored_info_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.get_stored_info_type(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_get_stored_info_type_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.get_stored_info_type(
+            dlp.GetStoredInfoTypeRequest(),
+            name='name_value',
+        )
+
+
+def test_list_stored_info_types(transport: str = 'grpc', request_type=dlp.ListStoredInfoTypesRequest):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.ListStoredInfoTypesResponse(
+            next_page_token='next_page_token_value',
+        )
+        response = client.list_stored_info_types(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListStoredInfoTypesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListStoredInfoTypesPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+def test_list_stored_info_types_from_dict():
+    test_list_stored_info_types(request_type=dict)
+
+
+def test_list_stored_info_types_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        client.list_stored_info_types()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListStoredInfoTypesRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListStoredInfoTypesRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse(
+            next_page_token='next_page_token_value',
+        ))
+        response = await client.list_stored_info_types(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.ListStoredInfoTypesRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager)
+    assert response.next_page_token == 'next_page_token_value'
+
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_async_from_dict():
+    await test_list_stored_info_types_async(request_type=dict)
+
+
+def test_list_stored_info_types_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.ListStoredInfoTypesRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        call.return_value = dlp.ListStoredInfoTypesResponse()
+        client.list_stored_info_types(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_field_headers_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.ListStoredInfoTypesRequest()
+
+    request.parent = 'parent/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse())
+        await client.list_stored_info_types(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'parent=parent/value',
+    ) in kw['metadata']
+
+
+def test_list_stored_info_types_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.ListStoredInfoTypesResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.list_stored_info_types(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+def test_list_stored_info_types_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.list_stored_info_types(
+            dlp.ListStoredInfoTypesRequest(),
+            parent='parent_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.list_stored_info_types(
+            parent='parent_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].parent == 'parent_value'
+
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.list_stored_info_types(
+            dlp.ListStoredInfoTypesRequest(),
+            parent='parent_value',
+        )
+
+
+def test_list_stored_info_types_pager():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        # Set the response to a series of pages.
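+        # side_effect returns one response per invocation of the stub; the
+        # trailing RuntimeError guards against the pager fetching more pages
+        # than were staged.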
+        call.side_effect = (
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[],
+                next_page_token='def',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                ],
+            ),
+            RuntimeError,
+        )
+
+        metadata = ()
+        metadata = tuple(metadata) + (
+            gapic_v1.routing_header.to_grpc_metadata((
+                ('parent', ''),
+            )),
+        )
+        pager = client.list_stored_info_types(request={})
+
+        assert pager._metadata == metadata
+
+        results = [i for i in pager]
+        assert len(results) == 6
+        assert all(isinstance(i, dlp.StoredInfoType)
+                   for i in results)
+
+def test_list_stored_info_types_pages():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__') as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[],
+                next_page_token='def',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = list(client.list_stored_info_types(request={}).pages)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_async_pager():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+        call.side_effect = (
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                ],
+                next_page_token='abc',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[],
+                next_page_token='def',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                ],
+                next_page_token='ghi',
+            ),
+            dlp.ListStoredInfoTypesResponse(
+                stored_info_types=[
+                    dlp.StoredInfoType(),
+                    dlp.StoredInfoType(),
+                ],
+            ),
+            RuntimeError,
+        )
+        async_pager = await client.list_stored_info_types(request={},)
+        assert async_pager.next_page_token == 'abc'
+        responses = []
+        async for response in async_pager:
+            responses.append(response)
+
+        assert len(responses) == 6
+        assert all(isinstance(i, dlp.StoredInfoType)
+                   for i in responses)
+
+@pytest.mark.asyncio
+async def test_list_stored_info_types_async_pages():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.list_stored_info_types),
+            '__call__', new_callable=mock.AsyncMock) as call:
+        # Set the response to a series of pages.
+ call.side_effect = ( + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + next_page_token='abc', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[], + next_page_token='def', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + ], + next_page_token='ghi', + ), + dlp.ListStoredInfoTypesResponse( + stored_info_types=[ + dlp.StoredInfoType(), + dlp.StoredInfoType(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_stored_info_types(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_delete_stored_info_type(transport: str = 'grpc', request_type=dlp.DeleteStoredInfoTypeRequest): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_stored_info_type_from_dict(): + test_delete_stored_info_type(request_type=dict) + + +def test_delete_stored_info_type_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + client.delete_stored_info_type() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteStoredInfoTypeRequest): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.DeleteStoredInfoTypeRequest() + + # Establish that the response is the type that we expect. 
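+    # RPCs that return google.protobuf.Empty surface as None in the client.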
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_async_from_dict(): + await test_delete_stored_info_type_async(request_type=dict) + + +def test_delete_stored_info_type_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteStoredInfoTypeRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + call.return_value = None + client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.DeleteStoredInfoTypeRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_stored_info_type(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_stored_info_type_flattened(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_stored_info_type), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_stored_info_type( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_stored_info_type_flattened_error(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_stored_info_type( + dlp.DeleteStoredInfoTypeRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_stored_info_type_flattened_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+    with mock.patch.object(
+            type(client.transport.delete_stored_info_type),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_stored_info_type(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_delete_stored_info_type_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.delete_stored_info_type(
+            dlp.DeleteStoredInfoTypeRequest(),
+            name='name_value',
+        )
+
+
+def test_hybrid_inspect_dlp_job(transport: str = 'grpc', request_type=dlp.HybridInspectDlpJobRequest):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.hybrid_inspect_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.HybridInspectResponse()
+        response = client.hybrid_inspect_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.HybridInspectDlpJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.HybridInspectResponse)
+
+
+def test_hybrid_inspect_dlp_job_from_dict():
+    test_hybrid_inspect_dlp_job(request_type=dict)
+
+
+def test_hybrid_inspect_dlp_job_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.hybrid_inspect_dlp_job),
+            '__call__') as call:
+        client.hybrid_inspect_dlp_job()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.HybridInspectDlpJobRequest()
+
+
+@pytest.mark.asyncio
+async def test_hybrid_inspect_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectDlpJobRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.hybrid_inspect_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse())
+        response = await client.hybrid_inspect_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.HybridInspectDlpJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, dlp.HybridInspectResponse)
+
+
+@pytest.mark.asyncio
+async def test_hybrid_inspect_dlp_job_async_from_dict():
+    await test_hybrid_inspect_dlp_job_async(request_type=dict)
+
+
+def test_hybrid_inspect_dlp_job_field_headers():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.HybridInspectDlpJobRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.hybrid_inspect_dlp_job),
+            '__call__') as call:
+        call.return_value = dlp.HybridInspectResponse()
+        client.hybrid_inspect_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+@pytest.mark.asyncio
+async def test_hybrid_inspect_dlp_job_field_headers_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Any value that is part of the HTTP/1.1 URI should be sent as
+    # a field header. Set these to a non-empty value.
+    request = dlp.HybridInspectDlpJobRequest()
+
+    request.name = 'name/value'
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.hybrid_inspect_dlp_job),
+            '__call__') as call:
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse())
+        await client.hybrid_inspect_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == request
+
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+def test_hybrid_inspect_dlp_job_flattened():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.hybrid_inspect_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = dlp.HybridInspectResponse()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.hybrid_inspect_dlp_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+def test_hybrid_inspect_dlp_job_flattened_error():
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.hybrid_inspect_dlp_job(
+            dlp.HybridInspectDlpJobRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_hybrid_inspect_dlp_job_flattened_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.hybrid_inspect_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.hybrid_inspect_dlp_job(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_hybrid_inspect_dlp_job_flattened_error_async():
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        await client.hybrid_inspect_dlp_job(
+            dlp.HybridInspectDlpJobRequest(),
+            name='name_value',
+        )
+
+
+def test_finish_dlp_job(transport: str = 'grpc', request_type=dlp.FinishDlpJobRequest):
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.finish_dlp_job),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = None
+        response = client.finish_dlp_job(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.FinishDlpJobRequest()
+
+    # Establish that the response is the type that we expect.
+    assert response is None
+
+
+def test_finish_dlp_job_from_dict():
+    test_finish_dlp_job(request_type=dict)
+
+
+def test_finish_dlp_job_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = DlpServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.finish_dlp_job),
+            '__call__') as call:
+        client.finish_dlp_job()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == dlp.FinishDlpJobRequest()
+
+
+@pytest.mark.asyncio
+async def test_finish_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.FinishDlpJobRequest):
+    client = DlpServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
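+    # Calling request_type() with no arguments yields a valid message with
+    # every field left at its proto3 default.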
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == dlp.FinishDlpJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_finish_dlp_job_async_from_dict(): + await test_finish_dlp_job_async(request_type=dict) + + +def test_finish_dlp_job_field_headers(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + call.return_value = None + client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_finish_dlp_job_field_headers_async(): + client = DlpServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dlp.FinishDlpJobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.finish_dlp_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.finish_dlp_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DlpServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DlpServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DlpServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DlpServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DlpServiceGrpcTransport, + ) + +def test_dlp_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_dlp_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.DlpServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
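+    # The abstract base transport only declares the method surface; concrete
+    # transports (gRPC, gRPC asyncio) are expected to override each entry.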
+ methods = ( + 'inspect_content', + 'redact_image', + 'deidentify_content', + 'reidentify_content', + 'list_info_types', + 'create_inspect_template', + 'update_inspect_template', + 'get_inspect_template', + 'list_inspect_templates', + 'delete_inspect_template', + 'create_deidentify_template', + 'update_deidentify_template', + 'get_deidentify_template', + 'list_deidentify_templates', + 'delete_deidentify_template', + 'create_job_trigger', + 'update_job_trigger', + 'hybrid_inspect_job_trigger', + 'get_job_trigger', + 'list_job_triggers', + 'delete_job_trigger', + 'activate_job_trigger', + 'create_dlp_job', + 'list_dlp_jobs', + 'get_dlp_job', + 'delete_dlp_job', + 'cancel_dlp_job', + 'create_stored_info_type', + 'update_stored_info_type', + 'get_stored_info_type', + 'list_stored_info_types', + 'delete_stored_info_type', + 'hybrid_inspect_dlp_job', + 'finish_dlp_job', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_dlp_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_dlp_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + quota_project_id="octopus", + ) + + +def test_dlp_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DlpServiceTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_dlp_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. 
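+    # google.auth.default is patched so the test stays hermetic and never
+    # reads the host machine's real Application Default Credentials.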
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DlpServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_dlp_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DlpServiceClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_dlp_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DlpServiceGrpcTransport, + transports.DlpServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_dlp_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DlpServiceGrpcTransport, grpc_helpers), + (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_dlp_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "dlp.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="dlp.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. 
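+    # An explicitly supplied ssl_channel_credentials object should be passed
+    # through to create_channel unchanged.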
+ with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. + with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_dlp_service_host_no_port(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com'), + ) + assert client.transport._host == 'dlp.googleapis.com:443' + + +def test_dlp_service_host_with_port(): + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com:8000'), + ) + assert client.transport._host == 'dlp.googleapis.com:8000' + +def test_dlp_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DlpServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_dlp_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.DlpServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. 
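The two checks above pin down the precedence between the transport's mTLS options. A minimal sketch of the same behavior from the caller's side — illustrative only, not part of the patch, with anonymous credentials standing in for real ones:

    import grpc
    from google.auth import credentials as ga_credentials
    from google.cloud.dlp_v2.services.dlp_service import transports

    # If ssl_channel_credentials is supplied it is used as-is; otherwise the
    # transport calls client_cert_source_for_mtls() and builds the channel
    # credentials from the returned (cert, key) pair.
    transport = transports.DlpServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),  # placeholder creds
        ssl_channel_credentials=grpc.ssl_channel_credentials(),
    )

The two parametrized tests that follow exercise the deprecated api_mtls_endpoint and client_cert_source arguments flagged for removal in the comment above.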
+@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) +def test_dlp_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_deidentify_template_path(): + organization = "squid" + deidentify_template = "clam" + expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) + actual = DlpServiceClient.deidentify_template_path(organization, deidentify_template) + assert expected == actual + + +def test_parse_deidentify_template_path(): + expected = { + "organization": "whelk", + "deidentify_template": "octopus", + } + path = DlpServiceClient.deidentify_template_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_deidentify_template_path(path) + assert expected == actual + +def test_dlp_content_path(): + project = "oyster" + expected = "projects/{project}/dlpContent".format(project=project, ) + actual = DlpServiceClient.dlp_content_path(project) + assert expected == actual + + +def test_parse_dlp_content_path(): + expected = { + "project": "nudibranch", + } + path = DlpServiceClient.dlp_content_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_dlp_content_path(path) + assert expected == actual + +def test_dlp_job_path(): + project = "cuttlefish" + dlp_job = "mussel" + expected = "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) + actual = DlpServiceClient.dlp_job_path(project, dlp_job) + assert expected == actual + + +def test_parse_dlp_job_path(): + expected = { + "project": "winkle", + "dlp_job": "nautilus", + } + path = DlpServiceClient.dlp_job_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_dlp_job_path(path) + assert expected == actual + +def test_finding_path(): + project = "scallop" + location = "abalone" + finding = "squid" + expected = "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) + actual = DlpServiceClient.finding_path(project, location, finding) + assert expected == actual + + +def test_parse_finding_path(): + expected = { + "project": "clam", + "location": "whelk", + "finding": "octopus", + } + path = DlpServiceClient.finding_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_finding_path(path) + assert expected == actual + +def test_inspect_template_path(): + organization = "oyster" + inspect_template = "nudibranch" + expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) + actual = DlpServiceClient.inspect_template_path(organization, inspect_template) + assert expected == actual + + +def test_parse_inspect_template_path(): + expected = { + "organization": "cuttlefish", + "inspect_template": "mussel", + } + path = DlpServiceClient.inspect_template_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_inspect_template_path(path) + assert expected == actual + +def test_job_trigger_path(): + project = "winkle" + job_trigger = "nautilus" + expected = "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) + actual = DlpServiceClient.job_trigger_path(project, job_trigger) + assert expected == actual + + +def test_parse_job_trigger_path(): + expected = { + "project": "scallop", + "job_trigger": "abalone", + } + path = DlpServiceClient.job_trigger_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_job_trigger_path(path) + assert expected == actual + +def test_stored_info_type_path(): + organization = "squid" + stored_info_type = "clam" + expected = "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) + actual = DlpServiceClient.stored_info_type_path(organization, stored_info_type) + assert expected == actual + + +def test_parse_stored_info_type_path(): + expected = { + "organization": "whelk", + "stored_info_type": "octopus", + } + path = DlpServiceClient.stored_info_type_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_stored_info_type_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "oyster" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = DlpServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "nudibranch", + } + path = DlpServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "cuttlefish" + expected = "folders/{folder}".format(folder=folder, ) + actual = DlpServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "mussel", + } + path = DlpServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "winkle" + expected = "organizations/{organization}".format(organization=organization, ) + actual = DlpServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nautilus", + } + path = DlpServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "scallop" + expected = "projects/{project}".format(project=project, ) + actual = DlpServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "abalone", + } + path = DlpServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = DlpServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "squid" + location = "clam" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = DlpServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "whelk", + "location": "octopus", + } + path = DlpServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DlpServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: + client = DlpServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = DlpServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) From f17adaec69b0f7e8a40d359a0e1c80f159160e1c Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 29 Jun 2021 22:22:15 +0000 Subject: [PATCH 2/2] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/master/packages/owl-bot/README.md --- .../services/dlp_service/transports/base.py | 2 +- .../services/dlp_service/transports/grpc.py | 5 +- .../dlp_service/transports/grpc_asyncio.py | 5 +- owl-bot-staging/v2/.coveragerc | 17 - owl-bot-staging/v2/MANIFEST.in | 2 - owl-bot-staging/v2/README.rst | 49 - owl-bot-staging/v2/docs/conf.py | 376 - .../v2/docs/dlp_v2/dlp_service.rst | 10 - owl-bot-staging/v2/docs/dlp_v2/services.rst | 6 - owl-bot-staging/v2/docs/dlp_v2/types.rst | 7 - owl-bot-staging/v2/docs/index.rst | 7 - .../v2/google/cloud/dlp/__init__.py | 333 - owl-bot-staging/v2/google/cloud/dlp/py.typed | 2 - .../v2/google/cloud/dlp_v2/__init__.py | 334 - .../google/cloud/dlp_v2/gapic_metadata.json | 363 - .../v2/google/cloud/dlp_v2/py.typed | 2 - .../google/cloud/dlp_v2/services/__init__.py | 15 - .../dlp_v2/services/dlp_service/__init__.py | 22 - .../services/dlp_service/async_client.py | 3237 ------ .../dlp_v2/services/dlp_service/client.py | 3345 ------ .../dlp_v2/services/dlp_service/pagers.py | 628 -- .../dlp_service/transports/__init__.py | 33 - .../services/dlp_service/transports/base.py | 771 -- .../services/dlp_service/transports/grpc.py | 1244 --- .../dlp_service/transports/grpc_asyncio.py | 1248 --- .../v2/google/cloud/dlp_v2/types/__init__.py | 332 - .../v2/google/cloud/dlp_v2/types/dlp.py | 6338 ----------- .../v2/google/cloud/dlp_v2/types/storage.py | 1202 --- owl-bot-staging/v2/mypy.ini | 3 - owl-bot-staging/v2/noxfile.py | 132 - .../v2/scripts/fixup_dlp_v2_keywords.py | 209 - owl-bot-staging/v2/setup.py | 53 - owl-bot-staging/v2/tests/__init__.py | 16 - owl-bot-staging/v2/tests/unit/__init__.py | 16 - .../v2/tests/unit/gapic/__init__.py | 16 - .../v2/tests/unit/gapic/dlp_v2/__init__.py | 16 - .../unit/gapic/dlp_v2/test_dlp_service.py | 9393 ----------------- tests/unit/gapic/dlp_v2/test_dlp_service.py | 26 +- 38 files changed, 31 insertions(+), 29784 deletions(-) delete mode 100644 owl-bot-staging/v2/.coveragerc delete mode 100644 owl-bot-staging/v2/MANIFEST.in delete mode 100644 owl-bot-staging/v2/README.rst delete mode 100644 owl-bot-staging/v2/docs/conf.py delete mode 100644 owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst delete mode 100644 owl-bot-staging/v2/docs/dlp_v2/services.rst delete mode 100644 owl-bot-staging/v2/docs/dlp_v2/types.rst delete mode 100644 owl-bot-staging/v2/docs/index.rst delete mode 100644 owl-bot-staging/v2/google/cloud/dlp/__init__.py delete mode 100644 
owl-bot-staging/v2/google/cloud/dlp/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/py.typed delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py delete mode 100644 owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py delete mode 100644 owl-bot-staging/v2/mypy.ini delete mode 100644 owl-bot-staging/v2/noxfile.py delete mode 100644 owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py delete mode 100644 owl-bot-staging/v2/setup.py delete mode 100644 owl-bot-staging/v2/tests/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py delete mode 100644 owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/google/cloud/dlp_v2/services/dlp_service/transports/base.py index 6705db3e..6eb8da64 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/base.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/base.py @@ -97,7 +97,7 @@ def __init__( scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) # Save the scopes. - self._scopes = scopes or self.AUTH_SCOPES + self._scopes = scopes # If no credentials are provided, then determine the appropriate # defaults. diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py b/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py index e750ff48..254d17ea 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py @@ -66,6 +66,7 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -106,6 +107,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. 
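The parameter documented just above is the substantive change in this second commit: self-signed JWT access now defaults to False rather than being forced on in the transport. A hedged sketch of opting back in, assuming service-account credentials and a hypothetical key-file path:

    from google.oauth2 import service_account
    from google.cloud.dlp_v2.services.dlp_service import transports

    creds = service_account.Credentials.from_service_account_file(
        "sa-key.json")  # hypothetical path, for illustration only
    transport = transports.DlpServiceGrpcTransport(
        credentials=creds,
        always_use_jwt_access=True,  # re-enable the previous behavior
    )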
Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport @@ -158,7 +161,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py index 84de521f..2cbb875f 100644 --- a/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py +++ b/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py @@ -112,6 +112,7 @@ def __init__( client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, ) -> None: """Instantiate the transport. @@ -153,6 +154,8 @@ def __init__( API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport @@ -204,7 +207,7 @@ def __init__( scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, - always_use_jwt_access=True, + always_use_jwt_access=always_use_jwt_access, ) if not self._grpc_channel: diff --git a/owl-bot-staging/v2/.coveragerc b/owl-bot-staging/v2/.coveragerc deleted file mode 100644 index f8366a49..00000000 --- a/owl-bot-staging/v2/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/dlp/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v2/MANIFEST.in b/owl-bot-staging/v2/MANIFEST.in deleted file mode 100644 index 148f6bf3..00000000 --- a/owl-bot-staging/v2/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/dlp *.py -recursive-include google/cloud/dlp_v2 *.py diff --git a/owl-bot-staging/v2/README.rst b/owl-bot-staging/v2/README.rst deleted file mode 100644 index cf97c2e7..00000000 --- a/owl-bot-staging/v2/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Dlp API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Dlp API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. 
The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v2/docs/conf.py b/owl-bot-staging/v2/docs/conf.py deleted file mode 100644 index c0dad2c2..00000000 --- a/owl-bot-staging/v2/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-dlp documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. 
-project = u"google-cloud-dlp" -copyright = u"2020, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. 
They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-dlp-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). 
-latex_documents = [ - ( - master_doc, - "google-cloud-dlp.tex", - u"google-cloud-dlp Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-dlp", - u"Google Cloud Dlp Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-dlp", - u"google-cloud-dlp Documentation", - author, - "google-cloud-dlp", - "GAPIC library for Google Cloud Dlp API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst b/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst deleted file mode 100644 index 914da512..00000000 --- a/owl-bot-staging/v2/docs/dlp_v2/dlp_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -DlpService ----------------------------- - -.. automodule:: google.cloud.dlp_v2.services.dlp_service - :members: - :inherited-members: - -.. 
automodule:: google.cloud.dlp_v2.services.dlp_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v2/docs/dlp_v2/services.rst b/owl-bot-staging/v2/docs/dlp_v2/services.rst deleted file mode 100644 index 864a8c83..00000000 --- a/owl-bot-staging/v2/docs/dlp_v2/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Dlp v2 API -==================================== -.. toctree:: - :maxdepth: 2 - - dlp_service diff --git a/owl-bot-staging/v2/docs/dlp_v2/types.rst b/owl-bot-staging/v2/docs/dlp_v2/types.rst deleted file mode 100644 index f2a1a4f5..00000000 --- a/owl-bot-staging/v2/docs/dlp_v2/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Cloud Dlp v2 API -================================= - -.. automodule:: google.cloud.dlp_v2.types - :members: - :undoc-members: - :show-inheritance: diff --git a/owl-bot-staging/v2/docs/index.rst b/owl-bot-staging/v2/docs/index.rst deleted file mode 100644 index d119451a..00000000 --- a/owl-bot-staging/v2/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - dlp_v2/services - dlp_v2/types diff --git a/owl-bot-staging/v2/google/cloud/dlp/__init__.py b/owl-bot-staging/v2/google/cloud/dlp/__init__.py deleted file mode 100644 index 32e9a89f..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp/__init__.py +++ /dev/null @@ -1,333 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.cloud.dlp_v2.services.dlp_service.client import DlpServiceClient -from google.cloud.dlp_v2.services.dlp_service.async_client import DlpServiceAsyncClient - -from google.cloud.dlp_v2.types.dlp import Action -from google.cloud.dlp_v2.types.dlp import ActivateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import AnalyzeDataSourceRiskDetails -from google.cloud.dlp_v2.types.dlp import BoundingBox -from google.cloud.dlp_v2.types.dlp import BucketingConfig -from google.cloud.dlp_v2.types.dlp import ByteContentItem -from google.cloud.dlp_v2.types.dlp import CancelDlpJobRequest -from google.cloud.dlp_v2.types.dlp import CharacterMaskConfig -from google.cloud.dlp_v2.types.dlp import CharsToIgnore -from google.cloud.dlp_v2.types.dlp import Color -from google.cloud.dlp_v2.types.dlp import Container -from google.cloud.dlp_v2.types.dlp import ContentItem -from google.cloud.dlp_v2.types.dlp import ContentLocation -from google.cloud.dlp_v2.types.dlp import CreateDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import CreateDlpJobRequest -from google.cloud.dlp_v2.types.dlp import CreateInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import CreateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import CreateStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import CryptoDeterministicConfig -from google.cloud.dlp_v2.types.dlp import CryptoHashConfig -from google.cloud.dlp_v2.types.dlp import CryptoKey -from google.cloud.dlp_v2.types.dlp import CryptoReplaceFfxFpeConfig -from google.cloud.dlp_v2.types.dlp import DateShiftConfig -from google.cloud.dlp_v2.types.dlp import DateTime -from google.cloud.dlp_v2.types.dlp import DeidentifyConfig -from google.cloud.dlp_v2.types.dlp import DeidentifyContentRequest -from google.cloud.dlp_v2.types.dlp import DeidentifyContentResponse -from google.cloud.dlp_v2.types.dlp import DeidentifyTemplate -from google.cloud.dlp_v2.types.dlp import DeleteDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import DeleteDlpJobRequest -from google.cloud.dlp_v2.types.dlp import DeleteInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import DeleteJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import DeleteStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import DlpJob -from google.cloud.dlp_v2.types.dlp import DocumentLocation -from google.cloud.dlp_v2.types.dlp import Error -from google.cloud.dlp_v2.types.dlp import ExcludeInfoTypes -from google.cloud.dlp_v2.types.dlp import ExclusionRule -from google.cloud.dlp_v2.types.dlp import FieldTransformation -from google.cloud.dlp_v2.types.dlp import Finding -from google.cloud.dlp_v2.types.dlp import FinishDlpJobRequest -from google.cloud.dlp_v2.types.dlp import FixedSizeBucketingConfig -from google.cloud.dlp_v2.types.dlp import GetDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import GetDlpJobRequest -from google.cloud.dlp_v2.types.dlp import GetInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import GetJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import GetStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import HybridContentItem -from google.cloud.dlp_v2.types.dlp import HybridFindingDetails -from google.cloud.dlp_v2.types.dlp import HybridInspectDlpJobRequest -from google.cloud.dlp_v2.types.dlp import HybridInspectJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import HybridInspectResponse -from google.cloud.dlp_v2.types.dlp import HybridInspectStatistics -from google.cloud.dlp_v2.types.dlp import 
ImageLocation -from google.cloud.dlp_v2.types.dlp import InfoTypeDescription -from google.cloud.dlp_v2.types.dlp import InfoTypeStats -from google.cloud.dlp_v2.types.dlp import InfoTypeTransformations -from google.cloud.dlp_v2.types.dlp import InspectConfig -from google.cloud.dlp_v2.types.dlp import InspectContentRequest -from google.cloud.dlp_v2.types.dlp import InspectContentResponse -from google.cloud.dlp_v2.types.dlp import InspectDataSourceDetails -from google.cloud.dlp_v2.types.dlp import InspectionRule -from google.cloud.dlp_v2.types.dlp import InspectionRuleSet -from google.cloud.dlp_v2.types.dlp import InspectJobConfig -from google.cloud.dlp_v2.types.dlp import InspectResult -from google.cloud.dlp_v2.types.dlp import InspectTemplate -from google.cloud.dlp_v2.types.dlp import JobTrigger -from google.cloud.dlp_v2.types.dlp import KmsWrappedCryptoKey -from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryConfig -from google.cloud.dlp_v2.types.dlp import LargeCustomDictionaryStats -from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesRequest -from google.cloud.dlp_v2.types.dlp import ListDeidentifyTemplatesResponse -from google.cloud.dlp_v2.types.dlp import ListDlpJobsRequest -from google.cloud.dlp_v2.types.dlp import ListDlpJobsResponse -from google.cloud.dlp_v2.types.dlp import ListInfoTypesRequest -from google.cloud.dlp_v2.types.dlp import ListInfoTypesResponse -from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesRequest -from google.cloud.dlp_v2.types.dlp import ListInspectTemplatesResponse -from google.cloud.dlp_v2.types.dlp import ListJobTriggersRequest -from google.cloud.dlp_v2.types.dlp import ListJobTriggersResponse -from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesRequest -from google.cloud.dlp_v2.types.dlp import ListStoredInfoTypesResponse -from google.cloud.dlp_v2.types.dlp import Location -from google.cloud.dlp_v2.types.dlp import Manual -from google.cloud.dlp_v2.types.dlp import MetadataLocation -from google.cloud.dlp_v2.types.dlp import OutputStorageConfig -from google.cloud.dlp_v2.types.dlp import PrimitiveTransformation -from google.cloud.dlp_v2.types.dlp import PrivacyMetric -from google.cloud.dlp_v2.types.dlp import QuasiId -from google.cloud.dlp_v2.types.dlp import QuoteInfo -from google.cloud.dlp_v2.types.dlp import Range -from google.cloud.dlp_v2.types.dlp import RecordCondition -from google.cloud.dlp_v2.types.dlp import RecordLocation -from google.cloud.dlp_v2.types.dlp import RecordSuppression -from google.cloud.dlp_v2.types.dlp import RecordTransformations -from google.cloud.dlp_v2.types.dlp import RedactConfig -from google.cloud.dlp_v2.types.dlp import RedactImageRequest -from google.cloud.dlp_v2.types.dlp import RedactImageResponse -from google.cloud.dlp_v2.types.dlp import ReidentifyContentRequest -from google.cloud.dlp_v2.types.dlp import ReidentifyContentResponse -from google.cloud.dlp_v2.types.dlp import ReplaceValueConfig -from google.cloud.dlp_v2.types.dlp import ReplaceWithInfoTypeConfig -from google.cloud.dlp_v2.types.dlp import RiskAnalysisJobConfig -from google.cloud.dlp_v2.types.dlp import Schedule -from google.cloud.dlp_v2.types.dlp import StatisticalTable -from google.cloud.dlp_v2.types.dlp import StorageMetadataLabel -from google.cloud.dlp_v2.types.dlp import StoredInfoType -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeConfig -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeStats -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeVersion -from google.cloud.dlp_v2.types.dlp 
import Table -from google.cloud.dlp_v2.types.dlp import TableLocation -from google.cloud.dlp_v2.types.dlp import TimePartConfig -from google.cloud.dlp_v2.types.dlp import TransformationErrorHandling -from google.cloud.dlp_v2.types.dlp import TransformationOverview -from google.cloud.dlp_v2.types.dlp import TransformationSummary -from google.cloud.dlp_v2.types.dlp import TransientCryptoKey -from google.cloud.dlp_v2.types.dlp import UnwrappedCryptoKey -from google.cloud.dlp_v2.types.dlp import UpdateDeidentifyTemplateRequest -from google.cloud.dlp_v2.types.dlp import UpdateInspectTemplateRequest -from google.cloud.dlp_v2.types.dlp import UpdateJobTriggerRequest -from google.cloud.dlp_v2.types.dlp import UpdateStoredInfoTypeRequest -from google.cloud.dlp_v2.types.dlp import Value -from google.cloud.dlp_v2.types.dlp import ValueFrequency -from google.cloud.dlp_v2.types.dlp import ContentOption -from google.cloud.dlp_v2.types.dlp import DlpJobType -from google.cloud.dlp_v2.types.dlp import InfoTypeSupportedBy -from google.cloud.dlp_v2.types.dlp import MatchingType -from google.cloud.dlp_v2.types.dlp import MetadataType -from google.cloud.dlp_v2.types.dlp import RelationalOperator -from google.cloud.dlp_v2.types.dlp import StoredInfoTypeState -from google.cloud.dlp_v2.types.storage import BigQueryField -from google.cloud.dlp_v2.types.storage import BigQueryKey -from google.cloud.dlp_v2.types.storage import BigQueryOptions -from google.cloud.dlp_v2.types.storage import BigQueryTable -from google.cloud.dlp_v2.types.storage import CloudStorageFileSet -from google.cloud.dlp_v2.types.storage import CloudStorageOptions -from google.cloud.dlp_v2.types.storage import CloudStoragePath -from google.cloud.dlp_v2.types.storage import CloudStorageRegexFileSet -from google.cloud.dlp_v2.types.storage import CustomInfoType -from google.cloud.dlp_v2.types.storage import DatastoreKey -from google.cloud.dlp_v2.types.storage import DatastoreOptions -from google.cloud.dlp_v2.types.storage import EntityId -from google.cloud.dlp_v2.types.storage import FieldId -from google.cloud.dlp_v2.types.storage import HybridOptions -from google.cloud.dlp_v2.types.storage import InfoType -from google.cloud.dlp_v2.types.storage import Key -from google.cloud.dlp_v2.types.storage import KindExpression -from google.cloud.dlp_v2.types.storage import PartitionId -from google.cloud.dlp_v2.types.storage import RecordKey -from google.cloud.dlp_v2.types.storage import StorageConfig -from google.cloud.dlp_v2.types.storage import StoredType -from google.cloud.dlp_v2.types.storage import TableOptions -from google.cloud.dlp_v2.types.storage import FileType -from google.cloud.dlp_v2.types.storage import Likelihood - -__all__ = ('DlpServiceClient', - 'DlpServiceAsyncClient', - 'Action', - 'ActivateJobTriggerRequest', - 'AnalyzeDataSourceRiskDetails', - 'BoundingBox', - 'BucketingConfig', - 'ByteContentItem', - 'CancelDlpJobRequest', - 'CharacterMaskConfig', - 'CharsToIgnore', - 'Color', - 'Container', - 'ContentItem', - 'ContentLocation', - 'CreateDeidentifyTemplateRequest', - 'CreateDlpJobRequest', - 'CreateInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'CreateStoredInfoTypeRequest', - 'CryptoDeterministicConfig', - 'CryptoHashConfig', - 'CryptoKey', - 'CryptoReplaceFfxFpeConfig', - 'DateShiftConfig', - 'DateTime', - 'DeidentifyConfig', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'DeidentifyTemplate', - 'DeleteDeidentifyTemplateRequest', - 'DeleteDlpJobRequest', - 'DeleteInspectTemplateRequest', - 
'DeleteJobTriggerRequest', - 'DeleteStoredInfoTypeRequest', - 'DlpJob', - 'DocumentLocation', - 'Error', - 'ExcludeInfoTypes', - 'ExclusionRule', - 'FieldTransformation', - 'Finding', - 'FinishDlpJobRequest', - 'FixedSizeBucketingConfig', - 'GetDeidentifyTemplateRequest', - 'GetDlpJobRequest', - 'GetInspectTemplateRequest', - 'GetJobTriggerRequest', - 'GetStoredInfoTypeRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectDlpJobRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectResponse', - 'HybridInspectStatistics', - 'ImageLocation', - 'InfoTypeDescription', - 'InfoTypeStats', - 'InfoTypeTransformations', - 'InspectConfig', - 'InspectContentRequest', - 'InspectContentResponse', - 'InspectDataSourceDetails', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectJobConfig', - 'InspectResult', - 'InspectTemplate', - 'JobTrigger', - 'KmsWrappedCryptoKey', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 'ListDeidentifyTemplatesRequest', - 'ListDeidentifyTemplatesResponse', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'Location', - 'Manual', - 'MetadataLocation', - 'OutputStorageConfig', - 'PrimitiveTransformation', - 'PrivacyMetric', - 'QuasiId', - 'QuoteInfo', - 'Range', - 'RecordCondition', - 'RecordLocation', - 'RecordSuppression', - 'RecordTransformations', - 'RedactConfig', - 'RedactImageRequest', - 'RedactImageResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'ReplaceValueConfig', - 'ReplaceWithInfoTypeConfig', - 'RiskAnalysisJobConfig', - 'Schedule', - 'StatisticalTable', - 'StorageMetadataLabel', - 'StoredInfoType', - 'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'Table', - 'TableLocation', - 'TimePartConfig', - 'TransformationErrorHandling', - 'TransformationOverview', - 'TransformationSummary', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'UpdateDeidentifyTemplateRequest', - 'UpdateInspectTemplateRequest', - 'UpdateJobTriggerRequest', - 'UpdateStoredInfoTypeRequest', - 'Value', - 'ValueFrequency', - 'ContentOption', - 'DlpJobType', - 'InfoTypeSupportedBy', - 'MatchingType', - 'MetadataType', - 'RelationalOperator', - 'StoredInfoTypeState', - 'BigQueryField', - 'BigQueryKey', - 'BigQueryOptions', - 'BigQueryTable', - 'CloudStorageFileSet', - 'CloudStorageOptions', - 'CloudStoragePath', - 'CloudStorageRegexFileSet', - 'CustomInfoType', - 'DatastoreKey', - 'DatastoreOptions', - 'EntityId', - 'FieldId', - 'HybridOptions', - 'InfoType', - 'Key', - 'KindExpression', - 'PartitionId', - 'RecordKey', - 'StorageConfig', - 'StoredType', - 'TableOptions', - 'FileType', - 'Likelihood', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp/py.typed b/owl-bot-staging/v2/google/cloud/dlp/py.typed deleted file mode 100644 index 23d89ef3..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dlp package uses inline types. 
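The deleted google/cloud/dlp/__init__.py above and the google/cloud/dlp_v2/__init__.py that follows mirror each other: the unversioned package simply re-exports the versioned dlp_v2 surface, so both import paths resolve to the same objects. A small illustration, assuming the released google-cloud-dlp package is installed:

    from google.cloud import dlp       # unversioned alias package
    from google.cloud import dlp_v2    # versioned package

    # Same class object behind both names.
    assert dlp.DlpServiceClient is dlp_v2.DlpServiceClient

    # Resource-path helpers are plain classmethods; no credentials needed.
    path = dlp_v2.DlpServiceClient.dlp_job_path("my-project", "my-job")
    assert path == "projects/my-project/dlpJobs/my-job"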
diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py deleted file mode 100644 index d20b32f7..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/__init__.py +++ /dev/null @@ -1,334 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from .services.dlp_service import DlpServiceClient -from .services.dlp_service import DlpServiceAsyncClient - -from .types.dlp import Action -from .types.dlp import ActivateJobTriggerRequest -from .types.dlp import AnalyzeDataSourceRiskDetails -from .types.dlp import BoundingBox -from .types.dlp import BucketingConfig -from .types.dlp import ByteContentItem -from .types.dlp import CancelDlpJobRequest -from .types.dlp import CharacterMaskConfig -from .types.dlp import CharsToIgnore -from .types.dlp import Color -from .types.dlp import Container -from .types.dlp import ContentItem -from .types.dlp import ContentLocation -from .types.dlp import CreateDeidentifyTemplateRequest -from .types.dlp import CreateDlpJobRequest -from .types.dlp import CreateInspectTemplateRequest -from .types.dlp import CreateJobTriggerRequest -from .types.dlp import CreateStoredInfoTypeRequest -from .types.dlp import CryptoDeterministicConfig -from .types.dlp import CryptoHashConfig -from .types.dlp import CryptoKey -from .types.dlp import CryptoReplaceFfxFpeConfig -from .types.dlp import DateShiftConfig -from .types.dlp import DateTime -from .types.dlp import DeidentifyConfig -from .types.dlp import DeidentifyContentRequest -from .types.dlp import DeidentifyContentResponse -from .types.dlp import DeidentifyTemplate -from .types.dlp import DeleteDeidentifyTemplateRequest -from .types.dlp import DeleteDlpJobRequest -from .types.dlp import DeleteInspectTemplateRequest -from .types.dlp import DeleteJobTriggerRequest -from .types.dlp import DeleteStoredInfoTypeRequest -from .types.dlp import DlpJob -from .types.dlp import DocumentLocation -from .types.dlp import Error -from .types.dlp import ExcludeInfoTypes -from .types.dlp import ExclusionRule -from .types.dlp import FieldTransformation -from .types.dlp import Finding -from .types.dlp import FinishDlpJobRequest -from .types.dlp import FixedSizeBucketingConfig -from .types.dlp import GetDeidentifyTemplateRequest -from .types.dlp import GetDlpJobRequest -from .types.dlp import GetInspectTemplateRequest -from .types.dlp import GetJobTriggerRequest -from .types.dlp import GetStoredInfoTypeRequest -from .types.dlp import HybridContentItem -from .types.dlp import HybridFindingDetails -from .types.dlp import HybridInspectDlpJobRequest -from .types.dlp import HybridInspectJobTriggerRequest -from .types.dlp import HybridInspectResponse -from .types.dlp import HybridInspectStatistics -from .types.dlp import ImageLocation -from .types.dlp import InfoTypeDescription -from .types.dlp import InfoTypeStats -from .types.dlp import InfoTypeTransformations -from .types.dlp import InspectConfig -from .types.dlp import 
InspectContentRequest -from .types.dlp import InspectContentResponse -from .types.dlp import InspectDataSourceDetails -from .types.dlp import InspectionRule -from .types.dlp import InspectionRuleSet -from .types.dlp import InspectJobConfig -from .types.dlp import InspectResult -from .types.dlp import InspectTemplate -from .types.dlp import JobTrigger -from .types.dlp import KmsWrappedCryptoKey -from .types.dlp import LargeCustomDictionaryConfig -from .types.dlp import LargeCustomDictionaryStats -from .types.dlp import ListDeidentifyTemplatesRequest -from .types.dlp import ListDeidentifyTemplatesResponse -from .types.dlp import ListDlpJobsRequest -from .types.dlp import ListDlpJobsResponse -from .types.dlp import ListInfoTypesRequest -from .types.dlp import ListInfoTypesResponse -from .types.dlp import ListInspectTemplatesRequest -from .types.dlp import ListInspectTemplatesResponse -from .types.dlp import ListJobTriggersRequest -from .types.dlp import ListJobTriggersResponse -from .types.dlp import ListStoredInfoTypesRequest -from .types.dlp import ListStoredInfoTypesResponse -from .types.dlp import Location -from .types.dlp import Manual -from .types.dlp import MetadataLocation -from .types.dlp import OutputStorageConfig -from .types.dlp import PrimitiveTransformation -from .types.dlp import PrivacyMetric -from .types.dlp import QuasiId -from .types.dlp import QuoteInfo -from .types.dlp import Range -from .types.dlp import RecordCondition -from .types.dlp import RecordLocation -from .types.dlp import RecordSuppression -from .types.dlp import RecordTransformations -from .types.dlp import RedactConfig -from .types.dlp import RedactImageRequest -from .types.dlp import RedactImageResponse -from .types.dlp import ReidentifyContentRequest -from .types.dlp import ReidentifyContentResponse -from .types.dlp import ReplaceValueConfig -from .types.dlp import ReplaceWithInfoTypeConfig -from .types.dlp import RiskAnalysisJobConfig -from .types.dlp import Schedule -from .types.dlp import StatisticalTable -from .types.dlp import StorageMetadataLabel -from .types.dlp import StoredInfoType -from .types.dlp import StoredInfoTypeConfig -from .types.dlp import StoredInfoTypeStats -from .types.dlp import StoredInfoTypeVersion -from .types.dlp import Table -from .types.dlp import TableLocation -from .types.dlp import TimePartConfig -from .types.dlp import TransformationErrorHandling -from .types.dlp import TransformationOverview -from .types.dlp import TransformationSummary -from .types.dlp import TransientCryptoKey -from .types.dlp import UnwrappedCryptoKey -from .types.dlp import UpdateDeidentifyTemplateRequest -from .types.dlp import UpdateInspectTemplateRequest -from .types.dlp import UpdateJobTriggerRequest -from .types.dlp import UpdateStoredInfoTypeRequest -from .types.dlp import Value -from .types.dlp import ValueFrequency -from .types.dlp import ContentOption -from .types.dlp import DlpJobType -from .types.dlp import InfoTypeSupportedBy -from .types.dlp import MatchingType -from .types.dlp import MetadataType -from .types.dlp import RelationalOperator -from .types.dlp import StoredInfoTypeState -from .types.storage import BigQueryField -from .types.storage import BigQueryKey -from .types.storage import BigQueryOptions -from .types.storage import BigQueryTable -from .types.storage import CloudStorageFileSet -from .types.storage import CloudStorageOptions -from .types.storage import CloudStoragePath -from .types.storage import CloudStorageRegexFileSet -from .types.storage import CustomInfoType -from 
.types.storage import DatastoreKey -from .types.storage import DatastoreOptions -from .types.storage import EntityId -from .types.storage import FieldId -from .types.storage import HybridOptions -from .types.storage import InfoType -from .types.storage import Key -from .types.storage import KindExpression -from .types.storage import PartitionId -from .types.storage import RecordKey -from .types.storage import StorageConfig -from .types.storage import StoredType -from .types.storage import TableOptions -from .types.storage import FileType -from .types.storage import Likelihood - -__all__ = ( - 'DlpServiceAsyncClient', -'Action', -'ActivateJobTriggerRequest', -'AnalyzeDataSourceRiskDetails', -'BigQueryField', -'BigQueryKey', -'BigQueryOptions', -'BigQueryTable', -'BoundingBox', -'BucketingConfig', -'ByteContentItem', -'CancelDlpJobRequest', -'CharacterMaskConfig', -'CharsToIgnore', -'CloudStorageFileSet', -'CloudStorageOptions', -'CloudStoragePath', -'CloudStorageRegexFileSet', -'Color', -'Container', -'ContentItem', -'ContentLocation', -'ContentOption', -'CreateDeidentifyTemplateRequest', -'CreateDlpJobRequest', -'CreateInspectTemplateRequest', -'CreateJobTriggerRequest', -'CreateStoredInfoTypeRequest', -'CryptoDeterministicConfig', -'CryptoHashConfig', -'CryptoKey', -'CryptoReplaceFfxFpeConfig', -'CustomInfoType', -'DatastoreKey', -'DatastoreOptions', -'DateShiftConfig', -'DateTime', -'DeidentifyConfig', -'DeidentifyContentRequest', -'DeidentifyContentResponse', -'DeidentifyTemplate', -'DeleteDeidentifyTemplateRequest', -'DeleteDlpJobRequest', -'DeleteInspectTemplateRequest', -'DeleteJobTriggerRequest', -'DeleteStoredInfoTypeRequest', -'DlpJob', -'DlpJobType', -'DlpServiceClient', -'DocumentLocation', -'EntityId', -'Error', -'ExcludeInfoTypes', -'ExclusionRule', -'FieldId', -'FieldTransformation', -'FileType', -'Finding', -'FinishDlpJobRequest', -'FixedSizeBucketingConfig', -'GetDeidentifyTemplateRequest', -'GetDlpJobRequest', -'GetInspectTemplateRequest', -'GetJobTriggerRequest', -'GetStoredInfoTypeRequest', -'HybridContentItem', -'HybridFindingDetails', -'HybridInspectDlpJobRequest', -'HybridInspectJobTriggerRequest', -'HybridInspectResponse', -'HybridInspectStatistics', -'HybridOptions', -'ImageLocation', -'InfoType', -'InfoTypeDescription', -'InfoTypeStats', -'InfoTypeSupportedBy', -'InfoTypeTransformations', -'InspectConfig', -'InspectContentRequest', -'InspectContentResponse', -'InspectDataSourceDetails', -'InspectJobConfig', -'InspectResult', -'InspectTemplate', -'InspectionRule', -'InspectionRuleSet', -'JobTrigger', -'Key', -'KindExpression', -'KmsWrappedCryptoKey', -'LargeCustomDictionaryConfig', -'LargeCustomDictionaryStats', -'Likelihood', -'ListDeidentifyTemplatesRequest', -'ListDeidentifyTemplatesResponse', -'ListDlpJobsRequest', -'ListDlpJobsResponse', -'ListInfoTypesRequest', -'ListInfoTypesResponse', -'ListInspectTemplatesRequest', -'ListInspectTemplatesResponse', -'ListJobTriggersRequest', -'ListJobTriggersResponse', -'ListStoredInfoTypesRequest', -'ListStoredInfoTypesResponse', -'Location', -'Manual', -'MatchingType', -'MetadataLocation', -'MetadataType', -'OutputStorageConfig', -'PartitionId', -'PrimitiveTransformation', -'PrivacyMetric', -'QuasiId', -'QuoteInfo', -'Range', -'RecordCondition', -'RecordKey', -'RecordLocation', -'RecordSuppression', -'RecordTransformations', -'RedactConfig', -'RedactImageRequest', -'RedactImageResponse', -'ReidentifyContentRequest', -'ReidentifyContentResponse', -'RelationalOperator', -'ReplaceValueConfig', -'ReplaceWithInfoTypeConfig', 
-'RiskAnalysisJobConfig', -'Schedule', -'StatisticalTable', -'StorageConfig', -'StorageMetadataLabel', -'StoredInfoType', -'StoredInfoTypeConfig', -'StoredInfoTypeState', -'StoredInfoTypeStats', -'StoredInfoTypeVersion', -'StoredType', -'Table', -'TableLocation', -'TableOptions', -'TimePartConfig', -'TransformationErrorHandling', -'TransformationOverview', -'TransformationSummary', -'TransientCryptoKey', -'UnwrappedCryptoKey', -'UpdateDeidentifyTemplateRequest', -'UpdateInspectTemplateRequest', -'UpdateJobTriggerRequest', -'UpdateStoredInfoTypeRequest', -'Value', -'ValueFrequency', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json b/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json deleted file mode 100644 index df73928b..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/gapic_metadata.json +++ /dev/null @@ -1,363 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.dlp_v2", - "protoPackage": "google.privacy.dlp.v2", - "schema": "1.0", - "services": { - "DlpService": { - "clients": { - "grpc": { - "libraryClient": "DlpServiceClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - "CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - "delete_deidentify_template" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - }, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - ] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - "GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - "ListInspectTemplates": { - "methods": [ - "list_inspect_templates" - ] - }, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - 
"update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - }, - "grpc-async": { - "libraryClient": "DlpServiceAsyncClient", - "rpcs": { - "ActivateJobTrigger": { - "methods": [ - "activate_job_trigger" - ] - }, - "CancelDlpJob": { - "methods": [ - "cancel_dlp_job" - ] - }, - "CreateDeidentifyTemplate": { - "methods": [ - "create_deidentify_template" - ] - }, - "CreateDlpJob": { - "methods": [ - "create_dlp_job" - ] - }, - "CreateInspectTemplate": { - "methods": [ - "create_inspect_template" - ] - }, - "CreateJobTrigger": { - "methods": [ - "create_job_trigger" - ] - }, - "CreateStoredInfoType": { - "methods": [ - "create_stored_info_type" - ] - }, - "DeidentifyContent": { - "methods": [ - "deidentify_content" - ] - }, - "DeleteDeidentifyTemplate": { - "methods": [ - "delete_deidentify_template" - ] - }, - "DeleteDlpJob": { - "methods": [ - "delete_dlp_job" - ] - }, - "DeleteInspectTemplate": { - "methods": [ - "delete_inspect_template" - ] - }, - "DeleteJobTrigger": { - "methods": [ - "delete_job_trigger" - ] - }, - "DeleteStoredInfoType": { - "methods": [ - "delete_stored_info_type" - ] - }, - "FinishDlpJob": { - "methods": [ - "finish_dlp_job" - ] - }, - "GetDeidentifyTemplate": { - "methods": [ - "get_deidentify_template" - ] - }, - "GetDlpJob": { - "methods": [ - "get_dlp_job" - ] - }, - "GetInspectTemplate": { - "methods": [ - "get_inspect_template" - ] - }, - "GetJobTrigger": { - "methods": [ - "get_job_trigger" - ] - }, - "GetStoredInfoType": { - "methods": [ - "get_stored_info_type" - ] - }, - "HybridInspectDlpJob": { - "methods": [ - "hybrid_inspect_dlp_job" - ] - }, - "HybridInspectJobTrigger": { - "methods": [ - "hybrid_inspect_job_trigger" - ] - }, - "InspectContent": { - "methods": [ - "inspect_content" - ] - }, - "ListDeidentifyTemplates": { - "methods": [ - "list_deidentify_templates" - ] - }, - "ListDlpJobs": { - "methods": [ - "list_dlp_jobs" - ] - }, - "ListInfoTypes": { - "methods": [ - "list_info_types" - ] - }, - "ListInspectTemplates": { - "methods": [ - "list_inspect_templates" - ] - }, - "ListJobTriggers": { - "methods": [ - "list_job_triggers" - ] - }, - "ListStoredInfoTypes": { - "methods": [ - "list_stored_info_types" - ] - }, - "RedactImage": { - "methods": [ - "redact_image" - ] - }, - "ReidentifyContent": { - "methods": [ - "reidentify_content" - ] - }, - "UpdateDeidentifyTemplate": { - "methods": [ - "update_deidentify_template" - ] - }, - "UpdateInspectTemplate": { - "methods": [ - "update_inspect_template" - ] - }, - "UpdateJobTrigger": { - "methods": [ - "update_job_trigger" - ] - }, - "UpdateStoredInfoType": { - "methods": [ - "update_stored_info_type" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed b/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed deleted file mode 100644 index 23d89ef3..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-dlp package uses inline types. 
diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py deleted file mode 100644 index 4de65971..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py deleted file mode 100644 index 161801ef..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import DlpServiceClient -from .async_client import DlpServiceAsyncClient - -__all__ = ( - 'DlpServiceClient', - 'DlpServiceAsyncClient', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py deleted file mode 100644 index efcf4735..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/async_client.py +++ /dev/null @@ -1,3237 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.types import dlp -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport -from .client import DlpServiceClient - - -class DlpServiceAsyncClient: - """The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in user- - supplied, unstructured data streams, like text blocks or images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - """ - - _client: DlpServiceClient - - DEFAULT_ENDPOINT = DlpServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = DlpServiceClient.DEFAULT_MTLS_ENDPOINT - - deidentify_template_path = staticmethod(DlpServiceClient.deidentify_template_path) - parse_deidentify_template_path = staticmethod(DlpServiceClient.parse_deidentify_template_path) - dlp_content_path = staticmethod(DlpServiceClient.dlp_content_path) - parse_dlp_content_path = staticmethod(DlpServiceClient.parse_dlp_content_path) - dlp_job_path = staticmethod(DlpServiceClient.dlp_job_path) - parse_dlp_job_path = staticmethod(DlpServiceClient.parse_dlp_job_path) - finding_path = staticmethod(DlpServiceClient.finding_path) - parse_finding_path = staticmethod(DlpServiceClient.parse_finding_path) - inspect_template_path = staticmethod(DlpServiceClient.inspect_template_path) - parse_inspect_template_path = staticmethod(DlpServiceClient.parse_inspect_template_path) - job_trigger_path = staticmethod(DlpServiceClient.job_trigger_path) - parse_job_trigger_path = staticmethod(DlpServiceClient.parse_job_trigger_path) - stored_info_type_path = staticmethod(DlpServiceClient.stored_info_type_path) - parse_stored_info_type_path = staticmethod(DlpServiceClient.parse_stored_info_type_path) - common_billing_account_path = staticmethod(DlpServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(DlpServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(DlpServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(DlpServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(DlpServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(DlpServiceClient.parse_common_organization_path) - common_project_path = staticmethod(DlpServiceClient.common_project_path) - parse_common_project_path = staticmethod(DlpServiceClient.parse_common_project_path) - common_location_path = staticmethod(DlpServiceClient.common_location_path) - parse_common_location_path = 
staticmethod(DlpServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceAsyncClient: The constructed client. - """ - return DlpServiceClient.from_service_account_info.__func__(DlpServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceAsyncClient: The constructed client. - """ - return DlpServiceClient.from_service_account_file.__func__(DlpServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DlpServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DlpServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(DlpServiceClient).get_transport_class, type(DlpServiceClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, DlpServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the dlp service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.DlpServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
- """ - self._client = DlpServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def inspect_content(self, - request: dlp.InspectContentRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectContentResponse: - r"""Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - Args: - request (:class:`google.cloud.dlp_v2.types.InspectContentRequest`): - The request object. Request to search for potentially - sensitive info in a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectContentResponse: - Results of inspecting an item. - """ - # Create or coerce a protobuf request object. - request = dlp.InspectContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.inspect_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def redact_image(self, - request: dlp.RedactImageRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.RedactImageResponse: - r"""Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive- - data-images to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Args: - request (:class:`google.cloud.dlp_v2.types.RedactImageRequest`): - The request object. Request to search for potentially - sensitive info in an image and redact it by covering it - with a colored rectangle. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- - Returns: - google.cloud.dlp_v2.types.RedactImageResponse: - Results of redacting an image. - """ - # Create or coerce a protobuf request object. - request = dlp.RedactImageRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.redact_image, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def deidentify_content(self, - request: dlp.DeidentifyContentRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyContentResponse: - r"""De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive- - data to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Args: - request (:class:`google.cloud.dlp_v2.types.DeidentifyContentRequest`): - The request object. Request to de-identify a list of - items. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyContentResponse: - Results of de-identifying a - ContentItem. - - """ - # Create or coerce a protobuf request object. - request = dlp.DeidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.deidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def reidentify_content(self, - request: dlp.ReidentifyContentRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ReidentifyContentResponse: - r"""Re-identifies content that has been de-identified. 
See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.ReidentifyContentRequest`): - The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.ReidentifyContentResponse: - Results of re-identifying a item. - """ - # Create or coerce a protobuf request object. - request = dlp.ReidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.reidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_info_types(self, - request: dlp.ListInfoTypesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ListInfoTypesResponse: - r"""Returns a list of the sensitive information types - that the DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.ListInfoTypesRequest`): - The request object. Request for the list of infoTypes. - parent (:class:`str`): - The parent resource name. - - The format of this value is as follows: - - :: - - locations/LOCATION_ID - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.ListInfoTypesResponse: - Response to the ListInfoTypes - request. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListInfoTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
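# Editorial note (not part of the generated file): the default_retry configured
# below implements exponential backoff. The first retry waits 0.1s, each later
# delay is multiplied by 1.3 and capped at 60.0s (0.1, 0.13, 0.169, ...), and
# retrying stops once the overall 300.0s deadline elapses. Only DeadlineExceeded
# and ServiceUnavailable are retried; any other error propagates immediately.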
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def create_inspect_template(self, - request: dlp.CreateInspectTemplateRequest = None, - *, - parent: str = None, - inspect_template: dlp.InspectTemplate = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Creates an InspectTemplate for re-using frequently - used configuration for inspecting content, images, and - storage. See https://cloud.google.com/dlp/docs/creating- - templates to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.CreateInspectTemplateRequest`): - The request object. Request message for - CreateInspectTemplate. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): - Required. The InspectTemplate to - create. - - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts- - templates to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
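# Editorial note (not part of the generated file): GAPIC "flattened" calls let a
# caller pass individual fields (parent=..., inspect_template=...) instead of a
# fully-formed CreateInspectTemplateRequest. Passing both a request object and
# flattened fields is ambiguous, so the check below rejects that combination.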
- has_flattened_params = any([parent, inspect_template]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_template is not None: - request.inspect_template = inspect_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_inspect_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_inspect_template(self, - request: dlp.UpdateInspectTemplateRequest = None, - *, - name: str = None, - inspect_template: dlp.InspectTemplate = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.UpdateInspectTemplateRequest`): - The request object. Request message for - UpdateInspectTemplate. - name (:class:`str`): - Required. Resource name of organization and - inspectTemplate to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - inspect_template (:class:`google.cloud.dlp_v2.types.InspectTemplate`): - New InspectTemplate value. - This corresponds to the ``inspect_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts- - templates to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name, inspect_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if inspect_template is not None: - request.inspect_template = inspect_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_inspect_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_inspect_template(self, - request: dlp.GetInspectTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.GetInspectTemplateRequest`): - The request object. Request message for - GetInspectTemplate. - name (:class:`str`): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts- - templates to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_inspect_templates(self, - request: dlp.ListInspectTemplatesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListInspectTemplatesAsyncPager: - r"""Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.ListInspectTemplatesRequest`): - The request object. Request message for - ListInspectTemplates. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesAsyncPager: - Response message for - ListInspectTemplates. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListInspectTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_inspect_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListInspectTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_inspect_template(self, - request: dlp.DeleteInspectTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.DeleteInspectTemplateRequest`): - The request object. Request message for - DeleteInspectTemplate. - name (:class:`str`): - Required. Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteInspectTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
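# Editorial note (not part of the generated file): DeleteInspectTemplate has no
# response payload (the proto RPC returns google.protobuf.Empty), so the awaited
# call below is not assigned to anything and the method is annotated `-> None`.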
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_deidentify_template(self, - request: dlp.CreateDeidentifyTemplateRequest = None, - *, - parent: str = None, - deidentify_template: dlp.DeidentifyTemplate = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Creates a DeidentifyTemplate for re-using frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates- - deid to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest`): - The request object. Request message for - CreateDeidentifyTemplate. - parent (:class:`str`): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): - Required. The DeidentifyTemplate to - create. - - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts- - templates to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, deidentify_template]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if deidentify_template is not None: - request.deidentify_template = deidentify_template - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_deidentify_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_deidentify_template(self, - request: dlp.UpdateDeidentifyTemplateRequest = None, - *, - name: str = None, - deidentify_template: dlp.DeidentifyTemplate = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Updates the DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest`): - The request object. Request message for - UpdateDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (:class:`google.cloud.dlp_v2.types.DeidentifyTemplate`): - New DeidentifyTemplate value. - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts- - templates to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, deidentify_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if deidentify_template is not None: - request.deidentify_template = deidentify_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_deidentify_template, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_deidentify_template(self, - request: dlp.GetDeidentifyTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Gets a DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest`): - The request object. Request message for - GetDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of the organization and - deidentify template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts- - templates to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response
-
- async def list_deidentify_templates(self,
- request: dlp.ListDeidentifyTemplatesRequest = None,
- *,
- parent: str = None,
- retry: retries.Retry = gapic_v1.method.DEFAULT,
- timeout: float = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> pagers.ListDeidentifyTemplatesAsyncPager:
- r"""Lists DeidentifyTemplates.
- See https://cloud.google.com/dlp/docs/creating-
- templates-deid to learn more.
-
- Args:
- request (:class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest`):
- The request object. Request message for
- ListDeidentifyTemplates.
- parent (:class:`str`):
- Required. Parent resource name.
-
- The format of this value varies depending on the scope
- of the request (project or organization) and whether you
- have `specified a processing
- location <https://cloud.google.com/dlp/docs/specifying-location>`__:
-
- - Projects scope, location specified:
- ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
- - Projects scope, no location specified (defaults to
- global): ``projects/``\ PROJECT_ID
- - Organizations scope, location specified:
- ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
- - Organizations scope, no location specified (defaults
- to global): ``organizations/``\ ORG_ID
-
- The following example ``parent`` string specifies a
- parent project with the identifier ``example-project``,
- and specifies the ``europe-west3`` location for
- processing data:
-
- ::
-
- parent=projects/example-project/locations/europe-west3
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesAsyncPager:
- Response message for
- ListDeidentifyTemplates.
- Iterating over this object will yield
- results and resolve additional pages
- automatically.
-
- """
- # Create or coerce a protobuf request object.
- # Sanity check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- request = dlp.ListDeidentifyTemplatesRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.list_deidentify_templates,
- default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=300.0,
- ),
- default_timeout=300.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Send the request.
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDeidentifyTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_deidentify_template(self, - request: dlp.DeleteDeidentifyTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest`): - The request object. Request message for - DeleteDeidentifyTemplate. - name (:class:`str`): - Required. Resource name of the organization and - deidentify template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteDeidentifyTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_job_trigger(self, - request: dlp.CreateJobTriggerRequest = None, - *, - parent: str = None, - job_trigger: dlp.JobTrigger = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.CreateJobTriggerRequest`): - The request object. Request message for - CreateJobTrigger. 
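``list_deidentify_templates`` above returns an AsyncPager rather than a bare response, so iteration drives further page fetches transparently. A short sketch:

.. code-block:: python

    from google.cloud import dlp_v2


    async def print_all_templates(parent: str) -> None:
        client = dlp_v2.DlpServiceAsyncClient()
        pager = await client.list_deidentify_templates(parent=parent)
        # Each `async for` step may issue further ListDeidentifyTemplates
        # calls behind the scenes as pages are exhausted.
        async for template in pager:
            print(template.name)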
- parent (:class:`str`):
- Required. Parent resource name.
-
- The format of this value varies depending on whether you
- have `specified a processing
- location <https://cloud.google.com/dlp/docs/specifying-location>`__:
-
- - Projects scope, location specified:
- ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
- - Projects scope, no location specified (defaults to
- global): ``projects/``\ PROJECT_ID
-
- The following example ``parent`` string specifies a
- parent project with the identifier ``example-project``,
- and specifies the ``europe-west3`` location for
- processing data:
-
- ::
-
- parent=projects/example-project/locations/europe-west3
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`):
- Required. The JobTrigger to create.
- This corresponds to the ``job_trigger`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.cloud.dlp_v2.types.JobTrigger:
- Contains a configuration to make dlp
- api calls on a repeating basis. See
- https://cloud.google.com/dlp/docs/concepts-
- job-triggers to learn more.
-
- """
- # Create or coerce a protobuf request object.
- # Sanity check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent, job_trigger])
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- request = dlp.CreateJobTriggerRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
- if job_trigger is not None:
- request.job_trigger = job_trigger
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.create_job_trigger,
- default_timeout=300.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def update_job_trigger(self,
- request: dlp.UpdateJobTriggerRequest = None,
- *,
- name: str = None,
- job_trigger: dlp.JobTrigger = None,
- update_mask: field_mask_pb2.FieldMask = None,
- retry: retries.Retry = gapic_v1.method.DEFAULT,
- timeout: float = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> dlp.JobTrigger:
- r"""Updates a job trigger.
- See https://cloud.google.com/dlp/docs/creating-job-
- triggers to learn more.
-
- Args:
- request (:class:`google.cloud.dlp_v2.types.UpdateJobTriggerRequest`):
- The request object. Request message for
- UpdateJobTrigger.
- name (:class:`str`):
- Required. Resource name of the project and the
- triggeredJob, for example
- ``projects/dlp-test-project/jobTriggers/53234423``.
- - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (:class:`google.cloud.dlp_v2.types.JobTrigger`): - New JobTrigger value. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts- - job-triggers to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, job_trigger, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if job_trigger is not None: - request.job_trigger = job_trigger - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def hybrid_inspect_job_trigger(self, - request: dlp.HybridInspectJobTriggerRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - Early access feature is in a pre-release state and might - change or have limited support. For more information, - see - https://cloud.google.com/products#product-launch-stages. - - Args: - request (:class:`google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest`): - The request object. Request to search for potentially - sensitive info in a custom location. - name (:class:`str`): - Required. Resource name of the trigger to execute a - hybrid inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
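Each method in this hunk splices a routing header into the call metadata before sending. Roughly what the helper contributes, sketched standalone (the exact URL-encoding of the value depends on the installed google-api-core version):

.. code-block:: python

    from google.api_core import gapic_v1

    metadata = gapic_v1.routing_header.to_grpc_metadata(
        (("name", "projects/dlp-test-project/jobTriggers/53234423"),)
    )
    # A single ("x-goog-request-params", "name=<url-encoded resource name>")
    # pair that tells the service frontend how to route the call.
    print(metadata)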
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.HybridInspectJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.hybrid_inspect_job_trigger, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job_trigger(self, - request: dlp.GetJobTriggerRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Gets a job trigger. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.GetJobTriggerRequest`): - The request object. Request message for GetJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts- - job-triggers to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.get_job_trigger,
- default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=300.0,
- ),
- default_timeout=300.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def list_job_triggers(self,
- request: dlp.ListJobTriggersRequest = None,
- *,
- parent: str = None,
- retry: retries.Retry = gapic_v1.method.DEFAULT,
- timeout: float = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> pagers.ListJobTriggersAsyncPager:
- r"""Lists job triggers.
- See https://cloud.google.com/dlp/docs/creating-job-
- triggers to learn more.
-
- Args:
- request (:class:`google.cloud.dlp_v2.types.ListJobTriggersRequest`):
- The request object. Request message for ListJobTriggers.
- parent (:class:`str`):
- Required. Parent resource name.
-
- The format of this value varies depending on whether you
- have `specified a processing
- location <https://cloud.google.com/dlp/docs/specifying-location>`__:
-
- - Projects scope, location specified:
- ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
- - Projects scope, no location specified (defaults to
- global): ``projects/``\ PROJECT_ID
-
- The following example ``parent`` string specifies a
- parent project with the identifier ``example-project``,
- and specifies the ``europe-west3`` location for
- processing data:
-
- ::
-
- parent=projects/example-project/locations/europe-west3
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersAsyncPager:
- Response message for ListJobTriggers.
- Iterating over this object will yield
- results and resolve additional pages
- automatically.
-
- """
- # Create or coerce a protobuf request object.
- # Sanity check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent])
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- request = dlp.ListJobTriggersRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_job_triggers, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobTriggersAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_job_trigger(self, - request: dlp.DeleteJobTriggerRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job trigger. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.DeleteJobTriggerRequest`): - The request object. Request message for - DeleteJobTrigger. - name (:class:`str`): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteJobTriggerRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- async def activate_job_trigger(self,
- request: dlp.ActivateJobTriggerRequest = None,
- *,
- retry: retries.Retry = gapic_v1.method.DEFAULT,
- timeout: float = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> dlp.DlpJob:
- r"""Activate a job trigger. Causes the immediate execution
- of a trigger instead of waiting on the trigger event to
- occur.
-
- Args:
- request (:class:`google.cloud.dlp_v2.types.ActivateJobTriggerRequest`):
- The request object. Request message for
- ActivateJobTrigger.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.cloud.dlp_v2.types.DlpJob:
- Combines all of the information about
- a DLP job.
-
- """
- # Create or coerce a protobuf request object.
- request = dlp.ActivateJobTriggerRequest(request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.activate_job_trigger,
- default_timeout=300.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def create_dlp_job(self,
- request: dlp.CreateDlpJobRequest = None,
- *,
- parent: str = None,
- inspect_job: dlp.InspectJobConfig = None,
- risk_job: dlp.RiskAnalysisJobConfig = None,
- retry: retries.Retry = gapic_v1.method.DEFAULT,
- timeout: float = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> dlp.DlpJob:
- r"""Creates a new job to inspect storage or calculate
- risk metrics. See
- https://cloud.google.com/dlp/docs/inspecting-storage and
- https://cloud.google.com/dlp/docs/compute-risk-analysis
- to learn more.
- When no InfoTypes or CustomInfoTypes are specified in
- inspect jobs, the system will automatically choose what
- detectors to run. By default this may be all types, but
- may change over time as detectors are updated.
-
- Args:
- request (:class:`google.cloud.dlp_v2.types.CreateDlpJobRequest`):
- The request object. Request message for
- CreateDlpJobRequest. Used to initiate long running jobs
- such as calculating risk metrics or inspecting Google
- Cloud Storage.
- parent (:class:`str`):
- Required. Parent resource name.
-
- The format of this value varies depending on whether you
- have `specified a processing
- location <https://cloud.google.com/dlp/docs/specifying-location>`__:
-
- - Projects scope, location specified:
- ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
- - Projects scope, no location specified (defaults to
- global): ``projects/``\ PROJECT_ID
-
- The following example ``parent`` string specifies a
- parent project with the identifier ``example-project``,
- and specifies the ``europe-west3`` location for
- processing data:
-
- ::
-
- parent=projects/example-project/locations/europe-west3
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- inspect_job (:class:`google.cloud.dlp_v2.types.InspectJobConfig`):
- Set to control what and how to
- inspect.
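Unlike most methods in this file, ``activate_job_trigger`` above exposes no flattened fields, so callers always construct the request object themselves. A sketch with a hypothetical trigger name:

.. code-block:: python

    from google.cloud import dlp_v2


    async def run_trigger_now(client: dlp_v2.DlpServiceAsyncClient) -> dlp_v2.DlpJob:
        request = dlp_v2.ActivateJobTriggerRequest(
            name="projects/my-project/jobTriggers/my-trigger",  # hypothetical
        )
        # Returns the DlpJob spawned by the trigger rather than waiting
        # for its schedule to fire.
        return await client.activate_job_trigger(request=request)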
-
- This corresponds to the ``inspect_job`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- risk_job (:class:`google.cloud.dlp_v2.types.RiskAnalysisJobConfig`):
- Set to choose what metric to
- calculate.
-
- This corresponds to the ``risk_job`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.cloud.dlp_v2.types.DlpJob:
- Combines all of the information about
- a DLP job.
-
- """
- # Create or coerce a protobuf request object.
- # Sanity check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent, inspect_job, risk_job])
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- request = dlp.CreateDlpJobRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if parent is not None:
- request.parent = parent
- if inspect_job is not None:
- request.inspect_job = inspect_job
- if risk_job is not None:
- request.risk_job = risk_job
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.create_dlp_job,
- default_timeout=300.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("parent", request.parent),
- )),
- )
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def list_dlp_jobs(self,
- request: dlp.ListDlpJobsRequest = None,
- *,
- parent: str = None,
- retry: retries.Retry = gapic_v1.method.DEFAULT,
- timeout: float = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> pagers.ListDlpJobsAsyncPager:
- r"""Lists DlpJobs that match the specified filter in the
- request. See
- https://cloud.google.com/dlp/docs/inspecting-storage and
- https://cloud.google.com/dlp/docs/compute-risk-analysis
- to learn more.
-
- Args:
- request (:class:`google.cloud.dlp_v2.types.ListDlpJobsRequest`):
- The request object. The request message for listing DLP
- jobs.
- parent (:class:`str`):
- Required. Parent resource name.
-
- The format of this value varies depending on whether you
- have `specified a processing
- location <https://cloud.google.com/dlp/docs/specifying-location>`__:
-
- - Projects scope, location specified:
- ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
- - Projects scope, no location specified (defaults to
- global): ``projects/``\ PROJECT_ID
-
- The following example ``parent`` string specifies a
- parent project with the identifier ``example-project``,
- and specifies the ``europe-west3`` location for
- processing data:
-
- ::
-
- parent=projects/example-project/locations/europe-west3
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsAsyncPager: - The response message for listing DLP - jobs. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListDlpJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_dlp_jobs, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListDlpJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_dlp_job(self, - request: dlp.GetDlpJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and https://cloud.google.com/dlp/docs/compute-risk- - analysis to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.GetDlpJobRequest`): - The request object. The request message for - [DlpJobs.GetDlpJob][]. - name (:class:`str`): - Required. The name of the DlpJob - resource. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
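As with the other list methods, the pager wrapped above supports both item-wise and page-wise asynchronous iteration. A sketch (the ``pages`` property is the standard GAPIC pager surface):

.. code-block:: python

    from google.cloud import dlp_v2


    async def summarize_jobs(client: dlp_v2.DlpServiceAsyncClient, parent: str) -> None:
        # Item-wise: pages are fetched lazily behind the scenes.
        pager = await client.list_dlp_jobs(parent=parent)
        async for job in pager:
            print(job.name, job.state)

        # Page-wise: useful when the raw ListDlpJobsResponse matters.
        pager = await client.list_dlp_jobs(parent=parent)
        async for page in pager.pages:
            print(len(page.jobs))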
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.GetDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_dlp_job(self, - request: dlp.DeleteDlpJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be cancelled if possible. - See https://cloud.google.com/dlp/docs/inspecting-storage - and https://cloud.google.com/dlp/docs/compute-risk- - analysis to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.DeleteDlpJobRequest`): - The request object. The request message for deleting a - DLP job. - name (:class:`str`): - Required. The name of the DlpJob - resource to be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Send the request.
- await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- async def cancel_dlp_job(self,
- request: dlp.CancelDlpJobRequest = None,
- *,
- retry: retries.Retry = gapic_v1.method.DEFAULT,
- timeout: float = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> None:
- r"""Starts asynchronous cancellation on a long-running
- DlpJob. The server makes a best effort to cancel the
- DlpJob, but success is not guaranteed.
- See https://cloud.google.com/dlp/docs/inspecting-storage
- and https://cloud.google.com/dlp/docs/compute-risk-
- analysis to learn more.
-
- Args:
- request (:class:`google.cloud.dlp_v2.types.CancelDlpJobRequest`):
- The request object. The request message for canceling a
- DLP job.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
- """
- # Create or coerce a protobuf request object.
- request = dlp.CancelDlpJobRequest(request)
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.cancel_dlp_job,
- default_timeout=300.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Send the request.
- await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- async def create_stored_info_type(self,
- request: dlp.CreateStoredInfoTypeRequest = None,
- *,
- parent: str = None,
- config: dlp.StoredInfoTypeConfig = None,
- retry: retries.Retry = gapic_v1.method.DEFAULT,
- timeout: float = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> dlp.StoredInfoType:
- r"""Creates a pre-built stored infoType to be used for
- inspection. See
- https://cloud.google.com/dlp/docs/creating-stored-
- infotypes to learn more.
-
- Args:
- request (:class:`google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest`):
- The request object. Request message for
- CreateStoredInfoType.
- parent (:class:`str`):
- Required. Parent resource name.
-
- The format of this value varies depending on the scope
- of the request (project or organization) and whether you
- have `specified a processing
- location <https://cloud.google.com/dlp/docs/specifying-location>`__:
-
- - Projects scope, location specified:
- ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
- - Projects scope, no location specified (defaults to
- global): ``projects/``\ PROJECT_ID
- - Organizations scope, location specified:
- ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
- - Organizations scope, no location specified (defaults
- to global): ``organizations/``\ ORG_ID
-
- The following example ``parent`` string specifies a
- parent project with the identifier ``example-project``,
- and specifies the ``europe-west3`` location for
- processing data:
-
- ::
-
- parent=projects/example-project/locations/europe-west3
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`):
- Required. Configuration of the
- storedInfoType to create.
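``cancel_dlp_job`` and ``delete_dlp_job`` above are complementary: cancellation is best-effort, while deletion declares the result unwanted (the server cancels first when it can). A combined sketch with a hypothetical job name:

.. code-block:: python

    from google.cloud import dlp_v2


    async def abandon_job(client: dlp_v2.DlpServiceAsyncClient, job_name: str) -> None:
        # Best-effort cancellation; the job may already have finished.
        await client.cancel_dlp_job(request=dlp_v2.CancelDlpJobRequest(name=job_name))
        # Deleting signals we no longer care about the result.
        await client.delete_dlp_job(name=job_name)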
- - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, config]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.CreateStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if config is not None: - request.config = config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_stored_info_type, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def update_stored_info_type(self, - request: dlp.UpdateStoredInfoTypeRequest = None, - *, - name: str = None, - config: dlp.StoredInfoTypeConfig = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest`): - The request object. Request message for - UpdateStoredInfoType. - name (:class:`str`): - Required. Resource name of organization and - storedInfoType to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (:class:`google.cloud.dlp_v2.types.StoredInfoTypeConfig`): - Updated configuration for the - storedInfoType. If not provided, a new - version of the storedInfoType will be - created with the existing configuration. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
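``create_stored_info_type`` above takes a parent plus a ``StoredInfoTypeConfig``. One plausible config shape, sketched with an illustrative regex-backed detector (the display name and pattern are hypothetical):

.. code-block:: python

    from google.cloud import dlp_v2


    async def create_ticket_detector(
        client: dlp_v2.DlpServiceAsyncClient, parent: str
    ) -> dlp_v2.StoredInfoType:
        # A regex detector is one of several config variants
        # (dictionaries and large custom dictionaries are others).
        config = dlp_v2.StoredInfoTypeConfig(
            display_name="Internal ticket IDs",
            regex=dlp_v2.CustomInfoType.Regex(pattern=r"TICKET-\d{6}"),
        )
        return await client.create_stored_info_type(parent=parent, config=config)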
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, config, update_mask]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.UpdateStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if config is not None: - request.config = config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.update_stored_info_type, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_stored_info_type(self, - request: dlp.GetStoredInfoTypeRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Gets a stored infoType. - See https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.GetStoredInfoTypeRequest`): - The request object. Request message for - GetStoredInfoType. - name (:class:`str`): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name])
- if request is not None and has_flattened_params:
- raise ValueError("If the `request` argument is set, then none of "
- "the individual field arguments should be set.")
-
- request = dlp.GetStoredInfoTypeRequest(request)
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
- if name is not None:
- request.name = name
-
- # Wrap the RPC method; this adds retry and timeout information,
- # and friendly error handling.
- rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.get_stored_info_type,
- default_retry=retries.Retry(
-initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type(
- core_exceptions.DeadlineExceeded,
- core_exceptions.ServiceUnavailable,
- ),
- deadline=300.0,
- ),
- default_timeout=300.0,
- client_info=DEFAULT_CLIENT_INFO,
- )
-
- # Certain fields should be provided within the metadata header;
- # add these here.
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ("name", request.name),
- )),
- )
-
- # Send the request.
- response = await rpc(
- request,
- retry=retry,
- timeout=timeout,
- metadata=metadata,
- )
-
- # Done; return the response.
- return response
-
- async def list_stored_info_types(self,
- request: dlp.ListStoredInfoTypesRequest = None,
- *,
- parent: str = None,
- retry: retries.Retry = gapic_v1.method.DEFAULT,
- timeout: float = None,
- metadata: Sequence[Tuple[str, str]] = (),
- ) -> pagers.ListStoredInfoTypesAsyncPager:
- r"""Lists stored infoTypes.
- See https://cloud.google.com/dlp/docs/creating-stored-
- infotypes to learn more.
-
- Args:
- request (:class:`google.cloud.dlp_v2.types.ListStoredInfoTypesRequest`):
- The request object. Request message for
- ListStoredInfoTypes.
- parent (:class:`str`):
- Required. Parent resource name.
-
- The format of this value varies depending on the scope
- of the request (project or organization) and whether you
- have `specified a processing
- location <https://cloud.google.com/dlp/docs/specifying-location>`__:
-
- - Projects scope, location specified:
- ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
- - Projects scope, no location specified (defaults to
- global): ``projects/``\ PROJECT_ID
- - Organizations scope, location specified:
- ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
- - Organizations scope, no location specified (defaults
- to global): ``organizations/``\ ORG_ID
-
- The following example ``parent`` string specifies a
- parent project with the identifier ``example-project``,
- and specifies the ``europe-west3`` location for
- processing data:
-
- ::
-
- parent=projects/example-project/locations/europe-west3
-
- This corresponds to the ``parent`` field
- on the ``request`` instance; if ``request`` is provided, this
- should not be set.
- retry (google.api_core.retry.Retry): Designation of what errors, if any,
- should be retried.
- timeout (float): The timeout for this request.
- metadata (Sequence[Tuple[str, str]]): Strings which should be
- sent along with the request as metadata.
-
- Returns:
- google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesAsyncPager:
- Response message for
- ListStoredInfoTypes.
- Iterating over this object will yield
- results and resolve additional pages
- automatically.
-
- """
- # Create or coerce a protobuf request object.
- # Sanity check: If we got a request object, we should *not* have
- # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.ListStoredInfoTypesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_stored_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListStoredInfoTypesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_stored_info_type(self, - request: dlp.DeleteStoredInfoTypeRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a stored infoType. - See https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Args: - request (:class:`google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest`): - The request object. Request message for - DeleteStoredInfoType. - name (:class:`str`): - Required. Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.DeleteStoredInfoTypeRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_stored_info_type, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def hybrid_inspect_dlp_job(self, - request: dlp.HybridInspectDlpJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a job. - To review the findings inspect the job. Inspection will - occur asynchronously. - Early access feature is in a pre-release state and might - change or have limited support. For more information, - see - https://cloud.google.com/products#product-launch-stages. - - Args: - request (:class:`google.cloud.dlp_v2.types.HybridInspectDlpJobRequest`): - The request object. Request to search for potentially - sensitive info in a custom location. - name (:class:`str`): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = dlp.HybridInspectDlpJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.hybrid_inspect_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def finish_dlp_job(self, - request: dlp.FinishDlpJobRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. Early access feature is in a pre- - release state and might change or have limited support. - For more information, see - https://cloud.google.com/products#product-launch-stages. - - Args: - request (:class:`google.cloud.dlp_v2.types.FinishDlpJobRequest`): - The request object. The request message for finishing a - DLP hybrid job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - request = dlp.FinishDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.finish_dlp_job, - default_timeout=300.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-dlp", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "DlpServiceAsyncClient", -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py deleted file mode 100644 index 288258aa..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/client.py +++ /dev/null @@ -1,3345 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
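The asynchronous client whose tail appears above exposes the hybrid-job RPCs as awaitables. A minimal end-to-end sketch of that flow, assuming an already-running hybrid job (the project and job IDs below are hypothetical, not taken from this patch):

.. code-block:: python

    import asyncio

    from google.cloud import dlp_v2


    async def main() -> None:
        client = dlp_v2.DlpServiceAsyncClient()
        job_name = "projects/dlp-test-project/dlpJobs/i-1234567890"  # hypothetical

        # Stream one batch of hybrid content into the running job.
        await client.hybrid_inspect_dlp_job(
            request=dlp_v2.HybridInspectDlpJobRequest(
                name=job_name,
                hybrid_item=dlp_v2.HybridContentItem(
                    item=dlp_v2.ContentItem(value="My email is test@example.com"),
                ),
            )
        )

        # Trigger finalization and any enabled actions that have not yet run.
        await client.finish_dlp_job(request={"name": job_name})


    asyncio.run(main())

Note that ``finish_dlp_job`` has no flattened ``name`` argument, so the job name travels on the request object (a dict is coerced into one).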
-#
-from collections import OrderedDict
-from distutils import util
-import os
-import re
-from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
-import pkg_resources
-
-from google.api_core import client_options as client_options_lib  # type: ignore
-from google.api_core import exceptions as core_exceptions  # type: ignore
-from google.api_core import gapic_v1  # type: ignore
-from google.api_core import retry as retries  # type: ignore
-from google.auth import credentials as ga_credentials  # type: ignore
-from google.auth.transport import mtls  # type: ignore
-from google.auth.transport.grpc import SslCredentials  # type: ignore
-from google.auth.exceptions import MutualTLSChannelError  # type: ignore
-from google.oauth2 import service_account  # type: ignore
-
-from google.cloud.dlp_v2.services.dlp_service import pagers
-from google.cloud.dlp_v2.types import dlp
-from google.protobuf import field_mask_pb2  # type: ignore
-from google.protobuf import timestamp_pb2  # type: ignore
-from .transports.base import DlpServiceTransport, DEFAULT_CLIENT_INFO
-from .transports.grpc import DlpServiceGrpcTransport
-from .transports.grpc_asyncio import DlpServiceGrpcAsyncIOTransport
-
-
-class DlpServiceClientMeta(type):
-    """Metaclass for the DlpService client.
-
-    This provides class-level methods for building and retrieving
-    support objects (e.g. transport) without polluting the client instance
-    objects.
-    """
-    _transport_registry = OrderedDict()  # type: Dict[str, Type[DlpServiceTransport]]
-    _transport_registry["grpc"] = DlpServiceGrpcTransport
-    _transport_registry["grpc_asyncio"] = DlpServiceGrpcAsyncIOTransport
-
-    def get_transport_class(cls,
-            label: str = None,
-        ) -> Type[DlpServiceTransport]:
-        """Returns an appropriate transport class.
-
-        Args:
-            label: The name of the desired transport. If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class DlpServiceClient(metaclass=DlpServiceClientMeta):
-    """The Cloud Data Loss Prevention (DLP) API is a service that
-    allows clients to detect the presence of Personally Identifiable
-    Information (PII) and other privacy-sensitive data in user-
-    supplied, unstructured data streams, like text blocks or images.
-    The service also includes methods for sensitive data redaction
-    and scheduling of data scans on Google Cloud Platform based data
-    sets.
-    To learn more about concepts and find how-to guides see
-    https://cloud.google.com/dlp/docs/.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "dlp.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DlpServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> DlpServiceTransport: - """Returns the transport used by the client instance. - - Returns: - DlpServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def deidentify_template_path(organization: str,deidentify_template: str,) -> str: - """Returns a fully-qualified deidentify_template string.""" - return "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) - - @staticmethod - def parse_deidentify_template_path(path: str) -> Dict[str,str]: - """Parses a deidentify_template path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/deidentifyTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def dlp_content_path(project: str,) -> str: - """Returns a fully-qualified dlp_content string.""" - return "projects/{project}/dlpContent".format(project=project, ) - - @staticmethod - def parse_dlp_content_path(path: str) -> Dict[str,str]: - """Parses a dlp_content path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/dlpContent$", path) - return m.groupdict() if m else {} - - @staticmethod - def dlp_job_path(project: str,dlp_job: str,) -> str: - """Returns a fully-qualified dlp_job string.""" - return "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) - - @staticmethod - def parse_dlp_job_path(path: str) -> Dict[str,str]: - """Parses a dlp_job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/dlpJobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def finding_path(project: str,location: str,finding: str,) -> str: - """Returns a fully-qualified finding string.""" - return "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) - - @staticmethod - def parse_finding_path(path: str) -> Dict[str,str]: - """Parses a finding path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/findings/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def inspect_template_path(organization: str,inspect_template: str,) -> str: - """Returns a fully-qualified inspect_template string.""" - return "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) - - @staticmethod - def parse_inspect_template_path(path: str) -> Dict[str,str]: - """Parses a inspect_template path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/inspectTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def job_trigger_path(project: str,job_trigger: str,) -> str: - """Returns a fully-qualified job_trigger string.""" - return "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) - - @staticmethod - def parse_job_trigger_path(path: str) -> Dict[str,str]: - """Parses a job_trigger path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/jobTriggers/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def stored_info_type_path(organization: str,stored_info_type: str,) -> str: - """Returns a fully-qualified stored_info_type string.""" - return "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) - - @staticmethod - def parse_stored_info_type_path(path: str) -> Dict[str,str]: - """Parses a stored_info_type path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)/storedInfoTypes/(?P.+?)$", 
-
-    @staticmethod
-    def common_billing_account_path(billing_account: str, ) -> str:
-        """Returns a fully-qualified billing_account string."""
-        return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
-
-    @staticmethod
-    def parse_common_billing_account_path(path: str) -> Dict[str,str]:
-        """Parse a billing_account path into its component segments."""
-        m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_folder_path(folder: str, ) -> str:
-        """Returns a fully-qualified folder string."""
-        return "folders/{folder}".format(folder=folder, )
-
-    @staticmethod
-    def parse_common_folder_path(path: str) -> Dict[str,str]:
-        """Parse a folder path into its component segments."""
-        m = re.match(r"^folders/(?P<folder>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_organization_path(organization: str, ) -> str:
-        """Returns a fully-qualified organization string."""
-        return "organizations/{organization}".format(organization=organization, )
-
-    @staticmethod
-    def parse_common_organization_path(path: str) -> Dict[str,str]:
-        """Parse an organization path into its component segments."""
-        m = re.match(r"^organizations/(?P<organization>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_project_path(project: str, ) -> str:
-        """Returns a fully-qualified project string."""
-        return "projects/{project}".format(project=project, )
-
-    @staticmethod
-    def parse_common_project_path(path: str) -> Dict[str,str]:
-        """Parse a project path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    @staticmethod
-    def common_location_path(project: str, location: str, ) -> str:
-        """Returns a fully-qualified location string."""
-        return "projects/{project}/locations/{location}".format(project=project, location=location, )
-
-    @staticmethod
-    def parse_common_location_path(path: str) -> Dict[str,str]:
-        """Parse a location path into its component segments."""
-        m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
-        return m.groupdict() if m else {}
-
-    def __init__(self, *,
-            credentials: Optional[ga_credentials.Credentials] = None,
-            transport: Union[str, DlpServiceTransport, None] = None,
-            client_options: Optional[client_options_lib.ClientOptions] = None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            ) -> None:
-        """Instantiates the dlp service client.
-
-        Args:
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-            transport (Union[str, DlpServiceTransport]): The
-                transport to use. If set to None, a transport is chosen
-                automatically.
-            client_options (google.api_core.client_options.ClientOptions): Custom options for the
-                client. It won't take effect if a ``transport`` instance is provided.
-                (1) The ``api_endpoint`` property can be used to override the
-                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
-                environment variable can also be used to override the endpoint:
-                "always" (always use the default mTLS endpoint), "never" (always
-                use the default regular endpoint) and "auto" (auto switch to the
-                default mTLS endpoint if client certificate is present, this is
-                the default value). However, the ``api_endpoint`` property takes
-                precedence if provided.
-                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
-                is "true", then the ``client_cert_source`` property can be used
-                to provide client certificate for mutual TLS transport. If
-                not provided, the default SSL client certificate will be used if
-                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
-                set, no client certificate will be used.
-            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
-                The client info used to send a user-agent string along with
-                API requests. If ``None``, then default info will be used.
-                Generally, you only need to set this if you're developing
-                your own client library.
-
-        Raises:
-            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
-                creation failed for any reason.
-        """
-        if isinstance(client_options, dict):
-            client_options = client_options_lib.from_dict(client_options)
-        if client_options is None:
-            client_options = client_options_lib.ClientOptions()
-
-        # Create SSL credentials for mutual TLS if needed.
-        use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")))
-
-        client_cert_source_func = None
-        is_mtls = False
-        if use_client_cert:
-            if client_options.client_cert_source:
-                is_mtls = True
-                client_cert_source_func = client_options.client_cert_source
-            else:
-                is_mtls = mtls.has_default_client_cert_source()
-                if is_mtls:
-                    client_cert_source_func = mtls.default_client_cert_source()
-                else:
-                    client_cert_source_func = None
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        else:
-            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-            if use_mtls_env == "never":
-                api_endpoint = self.DEFAULT_ENDPOINT
-            elif use_mtls_env == "always":
-                api_endpoint = self.DEFAULT_MTLS_ENDPOINT
-            elif use_mtls_env == "auto":
-                if is_mtls:
-                    api_endpoint = self.DEFAULT_MTLS_ENDPOINT
-                else:
-                    api_endpoint = self.DEFAULT_ENDPOINT
-            else:
-                raise MutualTLSChannelError(
-                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
-                    "values: never, auto, always"
-                )
-
-        # Save or instantiate the transport.
-        # Ordinarily, we provide the transport, but allowing a custom transport
-        # instance provides an extensibility point for unusual situations.
-        if isinstance(transport, DlpServiceTransport):
-            # transport is a DlpServiceTransport instance.
-            if credentials or client_options.credentials_file:
-                raise ValueError("When providing a transport instance, "
-                                 "provide its credentials directly.")
-            if client_options.scopes:
-                raise ValueError(
-                    "When providing a transport instance, provide its scopes "
-                    "directly."
-                )
-            self._transport = transport
-        else:
-            Transport = type(self).get_transport_class(transport)
-            self._transport = Transport(
-                credentials=credentials,
-                credentials_file=client_options.credentials_file,
-                host=api_endpoint,
-                scopes=client_options.scopes,
-                client_cert_source_for_mtls=client_cert_source_func,
-                quota_project_id=client_options.quota_project_id,
-                client_info=client_info,
-            )
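The endpoint-selection logic above consults ``client_options`` first, then the two ``GOOGLE_API_USE_*`` environment variables. A short configuration sketch (both calls assume Application Default Credentials are available):

.. code-block:: python

    import os

    from google.cloud import dlp_v2

    # Default: credentials resolved from the environment (ADC), endpoint
    # chosen by GOOGLE_API_USE_MTLS_ENDPOINT (which defaults to "auto").
    client = dlp_v2.DlpServiceClient()

    # Never negotiate mTLS, even if a client certificate is present.
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
    client = dlp_v2.DlpServiceClient()

    # An explicit api_endpoint always wins; client_options may be a dict.
    client = dlp_v2.DlpServiceClient(
        client_options={"api_endpoint": "dlp.googleapis.com"},
    )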
-
-    def inspect_content(self,
-            request: dlp.InspectContentRequest = None,
-            *,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> dlp.InspectContentResponse:
-        r"""Finds potentially sensitive info in content.
-        This method has limits on input size, processing time,
-        and output size.
- When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - For how to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text, - - Args: - request (google.cloud.dlp_v2.types.InspectContentRequest): - The request object. Request to search for potentially - sensitive info in a ContentItem. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectContentResponse: - Results of inspecting an item. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.InspectContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.InspectContentRequest): - request = dlp.InspectContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.inspect_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def redact_image(self, - request: dlp.RedactImageRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.RedactImageResponse: - r"""Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive- - data-images to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Args: - request (google.cloud.dlp_v2.types.RedactImageRequest): - The request object. Request to search for potentially - sensitive info in an image and redact it by covering it - with a colored rectangle. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.RedactImageResponse: - Results of redacting an image. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.RedactImageRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.RedactImageRequest): - request = dlp.RedactImageRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.redact_image] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def deidentify_content(self, - request: dlp.DeidentifyContentRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyContentResponse: - r"""De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive- - data to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Args: - request (google.cloud.dlp_v2.types.DeidentifyContentRequest): - The request object. Request to de-identify a list of - items. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyContentResponse: - Results of de-identifying a - ContentItem. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeidentifyContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeidentifyContentRequest): - request = dlp.DeidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.deidentify_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def reidentify_content(self, - request: dlp.ReidentifyContentRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ReidentifyContentResponse: - r"""Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Args: - request (google.cloud.dlp_v2.types.ReidentifyContentRequest): - The request object. Request to re-identify an item. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.ReidentifyContentResponse: - Results of re-identifying a item. - """ - # Create or coerce a protobuf request object. 
- # Minor optimization to avoid making a copy if the user passes - # in a dlp.ReidentifyContentRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ReidentifyContentRequest): - request = dlp.ReidentifyContentRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.reidentify_content] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_info_types(self, - request: dlp.ListInfoTypesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.ListInfoTypesResponse: - r"""Returns a list of the sensitive information types - that the DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - Args: - request (google.cloud.dlp_v2.types.ListInfoTypesRequest): - The request object. Request for the list of infoTypes. - parent (str): - The parent resource name. - - The format of this value is as follows: - - :: - - locations/LOCATION_ID - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.ListInfoTypesResponse: - Response to the ListInfoTypes - request. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListInfoTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListInfoTypesRequest): - request = dlp.ListInfoTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_info_types] - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
-        return response
-
-    def create_inspect_template(self,
-            request: dlp.CreateInspectTemplateRequest = None,
-            *,
-            parent: str = None,
-            inspect_template: dlp.InspectTemplate = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> dlp.InspectTemplate:
-        r"""Creates an InspectTemplate for re-using frequently
-        used configuration for inspecting content, images, and
-        storage. See https://cloud.google.com/dlp/docs/creating-
-        templates to learn more.
-
-        Args:
-            request (google.cloud.dlp_v2.types.CreateInspectTemplateRequest):
-                The request object. Request message for
-                CreateInspectTemplate.
-            parent (str):
-                Required. Parent resource name.
-
-                The format of this value varies depending on the scope
-                of the request (project or organization) and whether you
-                have `specified a processing
-                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
-
-                -  Projects scope, location specified:
-                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
-                -  Projects scope, no location specified (defaults to
-                   global): ``projects/``\ PROJECT_ID
-                -  Organizations scope, location specified:
-                   ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
-                -  Organizations scope, no location specified (defaults
-                   to global): ``organizations/``\ ORG_ID
-
-                The following example ``parent`` string specifies a
-                parent project with the identifier ``example-project``,
-                and specifies the ``europe-west3`` location for
-                processing data:
-
-                ::
-
-                    parent=projects/example-project/locations/europe-west3
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            inspect_template (google.cloud.dlp_v2.types.InspectTemplate):
-                Required. The InspectTemplate to
-                create.
-
-                This corresponds to the ``inspect_template`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dlp_v2.types.InspectTemplate:
-                The inspectTemplate contains a
-                configuration (set of types of sensitive
-                data to be detected) to be used anywhere
-                you otherwise would normally specify
-                InspectConfig. See
-                https://cloud.google.com/dlp/docs/concepts-
-                templates to learn more.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([parent, inspect_template])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # Minor optimization to avoid making a copy if the user passes
-        # in a dlp.CreateInspectTemplateRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, dlp.CreateInspectTemplateRequest):
-            request = dlp.CreateInspectTemplateRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if inspect_template is not None:
-            request.inspect_template = inspect_template
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.create_inspect_template]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
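The flattened-versus-request contract enforced by the ``ValueError`` above applies to every generated method: pass either a request object or the flattened fields, never both. A sketch of the two calling styles for ``create_inspect_template`` (the parent and infoType values are illustrative):

.. code-block:: python

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()

    template = dlp_v2.InspectTemplate(
        inspect_config=dlp_v2.InspectConfig(
            info_types=[dlp_v2.InfoType(name="EMAIL_ADDRESS")],
        ),
    )

    # Flattened arguments...
    created = client.create_inspect_template(
        parent="projects/example-project/locations/europe-west3",
        inspect_template=template,
    )

    # ...or a single request object; mixing the two styles in one call
    # raises the ValueError shown above.
    request = dlp_v2.CreateInspectTemplateRequest(
        parent="projects/example-project/locations/europe-west3",
        inspect_template=template,
    )
    created = client.create_inspect_template(request=request)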
-
-    def update_inspect_template(self,
-            request: dlp.UpdateInspectTemplateRequest = None,
-            *,
-            name: str = None,
-            inspect_template: dlp.InspectTemplate = None,
-            update_mask: field_mask_pb2.FieldMask = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> dlp.InspectTemplate:
-        r"""Updates the InspectTemplate.
-        See https://cloud.google.com/dlp/docs/creating-templates
-        to learn more.
-
-        Args:
-            request (google.cloud.dlp_v2.types.UpdateInspectTemplateRequest):
-                The request object. Request message for
-                UpdateInspectTemplate.
-            name (str):
-                Required. Resource name of organization and
-                inspectTemplate to be updated, for example
-                ``organizations/433245324/inspectTemplates/432452342``
-                or projects/project-id/inspectTemplates/432452342.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            inspect_template (google.cloud.dlp_v2.types.InspectTemplate):
-                New InspectTemplate value.
-                This corresponds to the ``inspect_template`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            update_mask (google.protobuf.field_mask_pb2.FieldMask):
-                Mask to control which fields get
-                updated.
-
-                This corresponds to the ``update_mask`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dlp_v2.types.InspectTemplate:
-                The inspectTemplate contains a
-                configuration (set of types of sensitive
-                data to be detected) to be used anywhere
-                you otherwise would normally specify
-                InspectConfig. See
-                https://cloud.google.com/dlp/docs/concepts-
-                templates to learn more.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([name, inspect_template, update_mask])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # Minor optimization to avoid making a copy if the user passes
-        # in a dlp.UpdateInspectTemplateRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, dlp.UpdateInspectTemplateRequest):
-            request = dlp.UpdateInspectTemplateRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
- if name is not None: - request.name = name - if inspect_template is not None: - request.inspect_template = inspect_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_inspect_template(self, - request: dlp.GetInspectTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.InspectTemplate: - r"""Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Args: - request (google.cloud.dlp_v2.types.GetInspectTemplateRequest): - The request object. Request message for - GetInspectTemplate. - name (str): - Required. Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` - or projects/project-id/inspectTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.InspectTemplate: - The inspectTemplate contains a - configuration (set of types of sensitive - data to be detected) to be used anywhere - you otherwise would normally specify - InspectConfig. See - https://cloud.google.com/dlp/docs/concepts- - templates to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetInspectTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetInspectTemplateRequest): - request = dlp.GetInspectTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_inspect_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
-        return response
-
-    def list_inspect_templates(self,
-            request: dlp.ListInspectTemplatesRequest = None,
-            *,
-            parent: str = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> pagers.ListInspectTemplatesPager:
-        r"""Lists InspectTemplates.
-        See https://cloud.google.com/dlp/docs/creating-templates
-        to learn more.
-
-        Args:
-            request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest):
-                The request object. Request message for
-                ListInspectTemplates.
-            parent (str):
-                Required. Parent resource name.
-
-                The format of this value varies depending on the scope
-                of the request (project or organization) and whether you
-                have `specified a processing
-                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
-
-                -  Projects scope, location specified:
-                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
-                -  Projects scope, no location specified (defaults to
-                   global): ``projects/``\ PROJECT_ID
-                -  Organizations scope, location specified:
-                   ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
-                -  Organizations scope, no location specified (defaults
-                   to global): ``organizations/``\ ORG_ID
-
-                The following example ``parent`` string specifies a
-                parent project with the identifier ``example-project``,
-                and specifies the ``europe-west3`` location for
-                processing data:
-
-                ::
-
-                    parent=projects/example-project/locations/europe-west3
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dlp_v2.services.dlp_service.pagers.ListInspectTemplatesPager:
-                Response message for
-                ListInspectTemplates.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([parent])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # Minor optimization to avoid making a copy if the user passes
-        # in a dlp.ListInspectTemplatesRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, dlp.ListInspectTemplatesRequest):
-            request = dlp.ListInspectTemplatesRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_inspect_templates]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListInspectTemplatesPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
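The pager returned above fetches subsequent pages lazily during iteration, so callers never handle ``page_token`` by hand. A short sketch (the parent values are illustrative):

.. code-block:: python

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()

    # Iterating the pager yields InspectTemplate items across page
    # boundaries, issuing further ListInspectTemplates calls as needed.
    for template in client.list_inspect_templates(
        parent="projects/example-project/locations/europe-west3",
    ):
        print(template.name)

    # Pages can also be walked explicitly.
    pager = client.list_inspect_templates(parent="projects/example-project")
    for page in pager.pages:
        print(len(page.inspect_templates))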
-
-    def delete_inspect_template(self,
-            request: dlp.DeleteInspectTemplateRequest = None,
-            *,
-            name: str = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> None:
-        r"""Deletes an InspectTemplate.
-        See https://cloud.google.com/dlp/docs/creating-templates
-        to learn more.
-
-        Args:
-            request (google.cloud.dlp_v2.types.DeleteInspectTemplateRequest):
-                The request object. Request message for
-                DeleteInspectTemplate.
-            name (str):
-                Required. Resource name of the organization and
-                inspectTemplate to be deleted, for example
-                ``organizations/433245324/inspectTemplates/432452342``
-                or projects/project-id/inspectTemplates/432452342.
-
-                This corresponds to the ``name`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([name])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # Minor optimization to avoid making a copy if the user passes
-        # in a dlp.DeleteInspectTemplateRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, dlp.DeleteInspectTemplateRequest):
-            request = dlp.DeleteInspectTemplateRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if name is not None:
-            request.name = name
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.delete_inspect_template]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Send the request.
-        rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-    def create_deidentify_template(self,
-            request: dlp.CreateDeidentifyTemplateRequest = None,
-            *,
-            parent: str = None,
-            deidentify_template: dlp.DeidentifyTemplate = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> dlp.DeidentifyTemplate:
-        r"""Creates a DeidentifyTemplate for re-using frequently
-        used configuration for de-identifying content, images,
-        and storage. See
-        https://cloud.google.com/dlp/docs/creating-templates-
-        deid to learn more.
-
-        Args:
-            request (google.cloud.dlp_v2.types.CreateDeidentifyTemplateRequest):
-                The request object. Request message for
-                CreateDeidentifyTemplate.
-            parent (str):
-                Required. Parent resource name.
-
-                The format of this value varies depending on the scope
-                of the request (project or organization) and whether you
-                have `specified a processing
-                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
-
-                -  Projects scope, location specified:
-                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
-                -  Projects scope, no location specified (defaults to
-                   global): ``projects/``\ PROJECT_ID
-                -  Organizations scope, location specified:
-                   ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
-                -  Organizations scope, no location specified (defaults
-                   to global): ``organizations/``\ ORG_ID
-
-                The following example ``parent`` string specifies a
-                parent project with the identifier ``example-project``,
-                and specifies the ``europe-west3`` location for
-                processing data:
-
-                ::
-
-                    parent=projects/example-project/locations/europe-west3
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate):
-                Required. The DeidentifyTemplate to
-                create.
-
-                This corresponds to the ``deidentify_template`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dlp_v2.types.DeidentifyTemplate:
-                DeidentifyTemplates contains
-                instructions on how to de-identify
-                content. See
-                https://cloud.google.com/dlp/docs/concepts-
-                templates to learn more.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([parent, deidentify_template])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # Minor optimization to avoid making a copy if the user passes
-        # in a dlp.CreateDeidentifyTemplateRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, dlp.CreateDeidentifyTemplateRequest):
-            request = dlp.CreateDeidentifyTemplateRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-        if deidentify_template is not None:
-            request.deidentify_template = deidentify_template
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.create_deidentify_template]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
- return response - - def update_deidentify_template(self, - request: dlp.UpdateDeidentifyTemplateRequest = None, - *, - name: str = None, - deidentify_template: dlp.DeidentifyTemplate = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Updates the DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Args: - request (google.cloud.dlp_v2.types.UpdateDeidentifyTemplateRequest): - The request object. Request message for - UpdateDeidentifyTemplate. - name (str): - Required. Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - New DeidentifyTemplate value. - This corresponds to the ``deidentify_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts- - templates to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, deidentify_template, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.UpdateDeidentifyTemplateRequest): - request = dlp.UpdateDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if deidentify_template is not None: - request.deidentify_template = deidentify_template - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_deidentify_template(self, - request: dlp.GetDeidentifyTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DeidentifyTemplate: - r"""Gets a DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Args: - request (google.cloud.dlp_v2.types.GetDeidentifyTemplateRequest): - The request object. Request message for - GetDeidentifyTemplate. - name (str): - Required. Resource name of the organization and - deidentify template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DeidentifyTemplate: - DeidentifyTemplates contains - instructions on how to de-identify - content. See - https://cloud.google.com/dlp/docs/concepts- - templates to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetDeidentifyTemplateRequest): - request = dlp.GetDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_deidentify_templates(self, - request: dlp.ListDeidentifyTemplatesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDeidentifyTemplatesPager: - r"""Lists DeidentifyTemplates. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Args: - request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): - The request object. Request message for - ListDeidentifyTemplates. - parent (str): - Required. Parent resource name. 
-
-                The format of this value varies depending on the scope
-                of the request (project or organization) and whether you
-                have `specified a processing
-                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
-
-                -  Projects scope, location specified:
-                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
-                -  Projects scope, no location specified (defaults to
-                   global): ``projects/``\ PROJECT_ID
-                -  Organizations scope, location specified:
-                   ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID
-                -  Organizations scope, no location specified (defaults
-                   to global): ``organizations/``\ ORG_ID
-
-                The following example ``parent`` string specifies a
-                parent project with the identifier ``example-project``,
-                and specifies the ``europe-west3`` location for
-                processing data:
-
-                ::
-
-                    parent=projects/example-project/locations/europe-west3
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dlp_v2.services.dlp_service.pagers.ListDeidentifyTemplatesPager:
-                Response message for
-                ListDeidentifyTemplates.
-                Iterating over this object will yield
-                results and resolve additional pages
-                automatically.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
-        has_flattened_params = any([parent])
-        if request is not None and has_flattened_params:
-            raise ValueError('If the `request` argument is set, then none of '
-                             'the individual field arguments should be set.')
-
-        # Minor optimization to avoid making a copy if the user passes
-        # in a dlp.ListDeidentifyTemplatesRequest.
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, dlp.ListDeidentifyTemplatesRequest):
-            request = dlp.ListDeidentifyTemplatesRequest(request)
-        # If we have keyword arguments corresponding to fields on the
-        # request, apply these.
-        if parent is not None:
-            request.parent = parent
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.list_deidentify_templates]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("parent", request.parent),
-            )),
-        )
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # This method is paged; wrap the response in a pager, which provides
-        # an `__iter__` convenience method.
-        response = pagers.ListDeidentifyTemplatesPager(
-            method=rpc,
-            request=request,
-            response=response,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def delete_deidentify_template(self,
-            request: dlp.DeleteDeidentifyTemplateRequest = None,
-            *,
-            name: str = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> None:
-        r"""Deletes a DeidentifyTemplate.
-        See https://cloud.google.com/dlp/docs/creating-
-        templates-deid to learn more.
- - Args: - request (google.cloud.dlp_v2.types.DeleteDeidentifyTemplateRequest): - The request object. Request message for - DeleteDeidentifyTemplate. - name (str): - Required. Resource name of the organization and - deidentify template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` - or projects/project-id/deidentifyTemplates/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteDeidentifyTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteDeidentifyTemplateRequest): - request = dlp.DeleteDeidentifyTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_deidentify_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_job_trigger(self, - request: dlp.CreateJobTriggerRequest = None, - *, - parent: str = None, - job_trigger: dlp.JobTrigger = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Args: - request (google.cloud.dlp_v2.types.CreateJobTriggerRequest): - The request object. Request message for - CreateJobTrigger. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - Required. The JobTrigger to create. 
- This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts- - job-triggers to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_trigger]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateJobTriggerRequest): - request = dlp.CreateJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job_trigger is not None: - request.job_trigger = job_trigger - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_job_trigger(self, - request: dlp.UpdateJobTriggerRequest = None, - *, - name: str = None, - job_trigger: dlp.JobTrigger = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Updates a job trigger. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Args: - request (google.cloud.dlp_v2.types.UpdateJobTriggerRequest): - The request object. Request message for - UpdateJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - New JobTrigger value. - This corresponds to the ``job_trigger`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. 
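Putting ``create_job_trigger`` together: the sketch below builds a daily trigger over a Cloud Storage path. The ``Schedule``, ``JobTrigger.Trigger``, ``JobTrigger.Status`` and storage types are part of the generated ``dlp_v2`` surface added elsewhere in this patch (``types/dlp.py`` and ``types/storage.py``), not shown in this hunk; bucket and project IDs are placeholders::

    from google.cloud import dlp_v2
    from google.protobuf import duration_pb2

    client = dlp_v2.DlpServiceClient()

    trigger = dlp_v2.JobTrigger(
        display_name="daily-gcs-scan",
        status=dlp_v2.JobTrigger.Status.HEALTHY,
        triggers=[
            dlp_v2.JobTrigger.Trigger(
                # Re-run the scan every 24 hours.
                schedule=dlp_v2.Schedule(
                    recurrence_period_duration=duration_pb2.Duration(seconds=86400),
                ),
            ),
        ],
        inspect_job=dlp_v2.InspectJobConfig(
            storage_config=dlp_v2.StorageConfig(
                cloud_storage_options=dlp_v2.CloudStorageOptions(
                    file_set=dlp_v2.CloudStorageOptions.FileSet(
                        url="gs://example-bucket/**",
                    ),
                ),
            ),
            inspect_config=dlp_v2.InspectConfig(
                info_types=[dlp_v2.InfoType(name="EMAIL_ADDRESS")],
            ),
        ),
    )

    created = client.create_job_trigger(
        parent="projects/example-project",
        job_trigger=trigger,
    )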
- metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts- - job-triggers to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name, job_trigger, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.UpdateJobTriggerRequest): - request = dlp.UpdateJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if job_trigger is not None: - request.job_trigger = job_trigger - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def hybrid_inspect_job_trigger(self, - request: dlp.HybridInspectJobTriggerRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - Early access feature is in a pre-release state and might - change or have limited support. For more information, - see - https://cloud.google.com/products#product-launch-stages. - - Args: - request (google.cloud.dlp_v2.types.HybridInspectJobTriggerRequest): - The request object. Request to search for potentially - sensitive info in a custom location. - name (str): - Required. Resource name of the trigger to execute a - hybrid inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
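``update_job_trigger`` follows the usual partial-update pattern: send only the changed fields plus a ``FieldMask`` naming them. A sketch reusing the docstring's example trigger name::

    from google.cloud import dlp_v2
    from google.protobuf import field_mask_pb2

    client = dlp_v2.DlpServiceClient()
    updated = client.update_job_trigger(
        name="projects/dlp-test-project/jobTriggers/53234423",
        job_trigger=dlp_v2.JobTrigger(display_name="renamed-trigger"),
        # Only fields listed in the mask are overwritten.
        update_mask=field_mask_pb2.FieldMask(paths=["display_name"]),
    )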
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.HybridInspectJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.HybridInspectJobTriggerRequest): - request = dlp.HybridInspectJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_job_trigger(self, - request: dlp.GetJobTriggerRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.JobTrigger: - r"""Gets a job trigger. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Args: - request (google.cloud.dlp_v2.types.GetJobTriggerRequest): - The request object. Request message for GetJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.JobTrigger: - Contains a configuration to make dlp - api calls on a repeating basis. See - https://cloud.google.com/dlp/docs/concepts- - job-triggers to learn more. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetJobTriggerRequest): - request = dlp.GetJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. 
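``hybrid_inspect_job_trigger`` above only flattens ``name``, so the content to inspect travels in the request object. The ``hybrid_item`` and ``ContentItem`` fields below come from the generated types module rather than this hunk, so treat them as assumptions; the trigger name reuses the docstring's example::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    response = client.hybrid_inspect_job_trigger(
        request=dlp_v2.HybridInspectJobTriggerRequest(
            name="projects/dlp-test-project/jobTriggers/53234423",
            hybrid_item=dlp_v2.HybridContentItem(
                item=dlp_v2.ContentItem(value="ssn 123-45-6789"),
            ),
        ),
    )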
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_job_triggers(self, - request: dlp.ListJobTriggersRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTriggersPager: - r"""Lists job triggers. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Args: - request (google.cloud.dlp_v2.types.ListJobTriggersRequest): - The request object. Request message for ListJobTriggers. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListJobTriggersPager: - Response message for ListJobTriggers. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListJobTriggersRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListJobTriggersRequest): - request = dlp.ListJobTriggersRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_job_triggers] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. 
- response = pagers.ListJobTriggersPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_job_trigger(self, - request: dlp.DeleteJobTriggerRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job trigger. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Args: - request (google.cloud.dlp_v2.types.DeleteJobTriggerRequest): - The request object. Request message for - DeleteJobTrigger. - name (str): - Required. Resource name of the project and the - triggeredJob, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteJobTriggerRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteJobTriggerRequest): - request = dlp.DeleteJobTriggerRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job_trigger] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def activate_job_trigger(self, - request: dlp.ActivateJobTriggerRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Activate a job trigger. Causes the immediate execute - of a trigger instead of waiting on the trigger event to - occur. - - Args: - request (google.cloud.dlp_v2.types.ActivateJobTriggerRequest): - The request object. Request message for - ActivateJobTrigger. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ActivateJobTriggerRequest. 
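Two trigger lifecycle calls from this hunk side by side: ``activate_job_trigger`` exposes no flattened fields, so it takes a request object (a plain dict is coerced by proto-plus, as the ``isinstance`` check above shows), while ``delete_job_trigger`` accepts a flattened ``name`` and returns ``None``. The trigger ID is a placeholder::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    trigger_name = "projects/example-project/jobTriggers/53234423"

    # Run the trigger now instead of waiting for its schedule.
    job = client.activate_job_trigger(request={"name": trigger_name})
    print(job.name)

    client.delete_job_trigger(name=trigger_name)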
-        # There's no risk of modifying the input as we've already verified
-        # there are no flattened fields.
-        if not isinstance(request, dlp.ActivateJobTriggerRequest):
-            request = dlp.ActivateJobTriggerRequest(request)
-
-        # Wrap the RPC method; this adds retry and timeout information,
-        # and friendly error handling.
-        rpc = self._transport._wrapped_methods[self._transport.activate_job_trigger]
-
-        # Certain fields should be provided within the metadata header;
-        # add these here.
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ("name", request.name),
-            )),
-        )
-
-        # Send the request.
-        response = rpc(
-            request,
-            retry=retry,
-            timeout=timeout,
-            metadata=metadata,
-        )
-
-        # Done; return the response.
-        return response
-
-    def create_dlp_job(self,
-            request: dlp.CreateDlpJobRequest = None,
-            *,
-            parent: str = None,
-            inspect_job: dlp.InspectJobConfig = None,
-            risk_job: dlp.RiskAnalysisJobConfig = None,
-            retry: retries.Retry = gapic_v1.method.DEFAULT,
-            timeout: float = None,
-            metadata: Sequence[Tuple[str, str]] = (),
-            ) -> dlp.DlpJob:
-        r"""Creates a new job to inspect storage or calculate
-        risk metrics. See
-        https://cloud.google.com/dlp/docs/inspecting-storage and
-        https://cloud.google.com/dlp/docs/compute-risk-analysis
-        to learn more.
-        When no InfoTypes or CustomInfoTypes are specified in
-        inspect jobs, the system will automatically choose what
-        detectors to run. By default this may be all types, but
-        may change over time as detectors are updated.
-
-        Args:
-            request (google.cloud.dlp_v2.types.CreateDlpJobRequest):
-                The request object. Request message for
-                CreateDlpJobRequest. Used to initiate long running jobs
-                such as calculating risk metrics or inspecting Google
-                Cloud Storage.
-            parent (str):
-                Required. Parent resource name.
-
-                The format of this value varies depending on whether you
-                have `specified a processing
-                location <https://cloud.google.com/dlp/docs/specifying-location>`__:
-
-                -  Projects scope, location specified:
-                   ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
-                -  Projects scope, no location specified (defaults to
-                   global): ``projects/``\ PROJECT_ID
-
-                The following example ``parent`` string specifies a
-                parent project with the identifier ``example-project``,
-                and specifies the ``europe-west3`` location for
-                processing data:
-
-                ::
-
-                    parent=projects/example-project/locations/europe-west3
-
-                This corresponds to the ``parent`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            inspect_job (google.cloud.dlp_v2.types.InspectJobConfig):
-                Set to control what and how to
-                inspect.
-
-                This corresponds to the ``inspect_job`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig):
-                Set to choose what metric to
-                calculate.
-
-                This corresponds to the ``risk_job`` field
-                on the ``request`` instance; if ``request`` is provided, this
-                should not be set.
-            retry (google.api_core.retry.Retry): Designation of what errors, if any,
-                should be retried.
-            timeout (float): The timeout for this request.
-            metadata (Sequence[Tuple[str, str]]): Strings which should be
-                sent along with the request as metadata.
-
-        Returns:
-            google.cloud.dlp_v2.types.DlpJob:
-                Combines all of the information about
-                a DLP job.
-
-        """
-        # Create or coerce a protobuf request object.
-        # Sanity check: If we got a request object, we should *not* have
-        # gotten any keyword arguments that map to the request.
- has_flattened_params = any([parent, inspect_job, risk_job]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateDlpJobRequest): - request = dlp.CreateDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if inspect_job is not None: - request.inspect_job = inspect_job - if risk_job is not None: - request.risk_job = risk_job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_dlp_jobs(self, - request: dlp.ListDlpJobsRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListDlpJobsPager: - r"""Lists DlpJobs that match the specified filter in the - request. See - https://cloud.google.com/dlp/docs/inspecting-storage and - https://cloud.google.com/dlp/docs/compute-risk-analysis - to learn more. - - Args: - request (google.cloud.dlp_v2.types.ListDlpJobsRequest): - The request object. The request message for listing DLP - jobs. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListDlpJobsPager: - The response message for listing DLP - jobs. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
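``create_dlp_job`` takes either ``inspect_job`` or ``risk_job``; they populate alternative fields of the request, so set exactly one. The storage types used below live in ``types/storage.py`` elsewhere in this patch; the bucket and project are placeholders::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()

    inspect_job = dlp_v2.InspectJobConfig(
        storage_config=dlp_v2.StorageConfig(
            cloud_storage_options=dlp_v2.CloudStorageOptions(
                file_set=dlp_v2.CloudStorageOptions.FileSet(
                    url="gs://example-bucket/reports/*",
                ),
            ),
        ),
    )

    # With no info_types set, the service picks default detectors,
    # as the docstring above notes.
    job = client.create_dlp_job(
        parent="projects/example-project",
        inspect_job=inspect_job,
    )
    print(job.name)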
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListDlpJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListDlpJobsRequest): - request = dlp.ListDlpJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_dlp_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListDlpJobsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_dlp_job(self, - request: dlp.GetDlpJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.DlpJob: - r"""Gets the latest state of a long-running DlpJob. - See https://cloud.google.com/dlp/docs/inspecting-storage - and https://cloud.google.com/dlp/docs/compute-risk- - analysis to learn more. - - Args: - request (google.cloud.dlp_v2.types.GetDlpJobRequest): - The request object. The request message for - [DlpJobs.GetDlpJob][]. - name (str): - Required. The name of the DlpJob - resource. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.DlpJob: - Combines all of the information about - a DLP job. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetDlpJobRequest): - request = dlp.GetDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
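Beyond the flattened ``parent``, ``list_dlp_jobs`` supports server-side filtering through the request's ``filter`` field; the filter expression below is an assumption based on the DLP jobs API rather than anything shown in this hunk::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    for job in client.list_dlp_jobs(
        request={"parent": "projects/example-project", "filter": "state=DONE"},
    ):
        print(job.name, job.state)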
- rpc = self._transport._wrapped_methods[self._transport.get_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_dlp_job(self, - request: dlp.DeleteDlpJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be cancelled if possible. - See https://cloud.google.com/dlp/docs/inspecting-storage - and https://cloud.google.com/dlp/docs/compute-risk- - analysis to learn more. - - Args: - request (google.cloud.dlp_v2.types.DeleteDlpJobRequest): - The request object. The request message for deleting a - DLP job. - name (str): - Required. The name of the DlpJob - resource to be deleted. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteDlpJobRequest): - request = dlp.DeleteDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def cancel_dlp_job(self, - request: dlp.CancelDlpJobRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and https://cloud.google.com/dlp/docs/compute-risk- - analysis to learn more. - - Args: - request (google.cloud.dlp_v2.types.CancelDlpJobRequest): - The request object. The request message for canceling a - DLP job. 
- retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CancelDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CancelDlpJobRequest): - request = dlp.CancelDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.cancel_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_stored_info_type(self, - request: dlp.CreateStoredInfoTypeRequest = None, - *, - parent: str = None, - config: dlp.StoredInfoTypeConfig = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Args: - request (google.cloud.dlp_v2.types.CreateStoredInfoTypeRequest): - The request object. Request message for - CreateStoredInfoType. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Required. Configuration of the - storedInfoType to create. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
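``cancel_dlp_job`` (just above) and ``delete_dlp_job`` cover different intents: cancel asks the server to stop a running job on a best-effort basis, while delete discards the job once its result is no longer needed. Neither returns a value; the job name is a placeholder::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    job_name = "projects/example-project/dlpJobs/i-1234567890"

    # cancel_dlp_job exposes no flattened fields, so pass a request
    # object (a dict is coerced by proto-plus).
    client.cancel_dlp_job(request={"name": job_name})

    # delete_dlp_job does flatten `name`.
    client.delete_dlp_job(name=job_name)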
- has_flattened_params = any([parent, config]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.CreateStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.CreateStoredInfoTypeRequest): - request = dlp.CreateStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if config is not None: - request.config = config - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def update_stored_info_type(self, - request: dlp.UpdateStoredInfoTypeRequest = None, - *, - name: str = None, - config: dlp.StoredInfoTypeConfig = None, - update_mask: field_mask_pb2.FieldMask = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Args: - request (google.cloud.dlp_v2.types.UpdateStoredInfoTypeRequest): - The request object. Request message for - UpdateStoredInfoType. - name (str): - Required. Resource name of organization and - storedInfoType to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Updated configuration for the - storedInfoType. If not provided, a new - version of the storedInfoType will be - created with the existing configuration. - - This corresponds to the ``config`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get - updated. - - This corresponds to the ``update_mask`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
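A sketch for ``create_stored_info_type`` using a large custom dictionary, a common stored-infoType configuration. ``LargeCustomDictionaryConfig``, ``CloudStorageFileSet`` and ``CloudStoragePath`` come from the generated storage types rather than this hunk, so treat the field names as assumptions; paths are placeholders::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    config = dlp_v2.StoredInfoTypeConfig(
        display_name="Internal usernames",
        large_custom_dictionary=dlp_v2.LargeCustomDictionaryConfig(
            # Source term list, and where DLP writes the built dictionary.
            cloud_storage_file_set=dlp_v2.CloudStorageFileSet(
                url="gs://example-bucket/usernames.txt",
            ),
            output_path=dlp_v2.CloudStoragePath(
                path="gs://example-bucket/dictionary/",
            ),
        ),
    )
    created = client.create_stored_info_type(
        parent="projects/example-project",
        config=config,
    )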
- has_flattened_params = any([name, config, update_mask]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.UpdateStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.UpdateStoredInfoTypeRequest): - request = dlp.UpdateStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - if config is not None: - request.config = config - if update_mask is not None: - request.update_mask = update_mask - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.update_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_stored_info_type(self, - request: dlp.GetStoredInfoTypeRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.StoredInfoType: - r"""Gets a stored infoType. - See https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Args: - request (google.cloud.dlp_v2.types.GetStoredInfoTypeRequest): - The request object. Request message for - GetStoredInfoType. - name (str): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.StoredInfoType: - StoredInfoType resource message that - contains information about the current - version and any pending updates. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.GetStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.GetStoredInfoTypeRequest): - request = dlp.GetStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_stored_info_types(self, - request: dlp.ListStoredInfoTypesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListStoredInfoTypesPager: - r"""Lists stored infoTypes. - See https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Args: - request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): - The request object. Request message for - ListStoredInfoTypes. - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope - of the request (project or organization) and whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults - to global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a - parent project with the identifier ``example-project``, - and specifies the ``europe-west3`` location for - processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.services.dlp_service.pagers.ListStoredInfoTypesPager: - Response message for - ListStoredInfoTypes. - Iterating over this object will yield - results and resolve additional pages - automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.ListStoredInfoTypesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.ListStoredInfoTypesRequest): - request = dlp.ListStoredInfoTypesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = self._transport._wrapped_methods[self._transport.list_stored_info_types] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListStoredInfoTypesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_stored_info_type(self, - request: dlp.DeleteStoredInfoTypeRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a stored infoType. - See https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Args: - request (google.cloud.dlp_v2.types.DeleteStoredInfoTypeRequest): - The request object. Request message for - DeleteStoredInfoType. - name (str): - Required. Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.DeleteStoredInfoTypeRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.DeleteStoredInfoTypeRequest): - request = dlp.DeleteStoredInfoTypeRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_stored_info_type] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def hybrid_inspect_dlp_job(self, - request: dlp.HybridInspectDlpJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> dlp.HybridInspectResponse: - r"""Inspect hybrid content and store findings to a job. - To review the findings inspect the job. Inspection will - occur asynchronously. 
- Early access feature is in a pre-release state and might - change or have limited support. For more information, - see - https://cloud.google.com/products#product-launch-stages. - - Args: - request (google.cloud.dlp_v2.types.HybridInspectDlpJobRequest): - The request object. Request to search for potentially - sensitive info in a custom location. - name (str): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.dlp_v2.types.HybridInspectResponse: - Quota exceeded errors will be thrown - once quota has been met. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a dlp.HybridInspectDlpJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.HybridInspectDlpJobRequest): - request = dlp.HybridInspectDlpJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.hybrid_inspect_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def finish_dlp_job(self, - request: dlp.FinishDlpJobRequest = None, - *, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. Early access feature is in a pre- - release state and might change or have limited support. - For more information, see - https://cloud.google.com/products#product-launch-stages. - - Args: - request (google.cloud.dlp_v2.types.FinishDlpJobRequest): - The request object. The request message for finishing a - DLP hybrid job. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Minor optimization to avoid making a copy if the user passes - # in a dlp.FinishDlpJobRequest. 
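The two hybrid-job calls above pair naturally: stream items into a running hybrid job with ``hybrid_inspect_dlp_job``, then call ``finish_dlp_job`` to trigger finalization and any pending actions. The nested ``hybrid_item`` dict mirrors the request type's fields and is an assumption not shown in this hunk; the job name reuses the docstring's example::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    job_name = "projects/dlp-test-project/dlpJob/53234423"

    client.hybrid_inspect_dlp_job(
        request={
            "name": job_name,
            "hybrid_item": {"item": {"value": "card 4111-1111-1111-1111"}},
        },
    )

    # finish_dlp_job returns None; findings are reviewed on the job itself.
    client.finish_dlp_job(request={"name": job_name})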
- # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, dlp.FinishDlpJobRequest): - request = dlp.FinishDlpJobRequest(request) - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.finish_dlp_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-dlp", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "DlpServiceClient", -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py deleted file mode 100644 index 51cc2937..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/pagers.py +++ /dev/null @@ -1,628 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional - -from google.cloud.dlp_v2.types import dlp - - -class ListInspectTemplatesPager: - """A pager for iterating through ``list_inspect_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``inspect_templates`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListInspectTemplates`` requests and continue to iterate - through the ``inspect_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListInspectTemplatesResponse], - request: dlp.ListInspectTemplatesRequest, - response: dlp.ListInspectTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = dlp.ListInspectTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[dlp.ListInspectTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[dlp.InspectTemplate]: - for page in self.pages: - yield from page.inspect_templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListInspectTemplatesAsyncPager: - """A pager for iterating through ``list_inspect_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``inspect_templates`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListInspectTemplates`` requests and continue to iterate - through the ``inspect_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListInspectTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListInspectTemplatesResponse]], - request: dlp.ListInspectTemplatesRequest, - response: dlp.ListInspectTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListInspectTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListInspectTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListInspectTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[dlp.ListInspectTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[dlp.InspectTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.inspect_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDeidentifyTemplatesPager: - """A pager for iterating through ``list_deidentify_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``deidentify_templates`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListDeidentifyTemplates`` requests and continue to iterate - through the ``deidentify_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListDeidentifyTemplatesResponse], - request: dlp.ListDeidentifyTemplatesRequest, - response: dlp.ListDeidentifyTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDeidentifyTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[dlp.ListDeidentifyTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[dlp.DeidentifyTemplate]: - for page in self.pages: - yield from page.deidentify_templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDeidentifyTemplatesAsyncPager: - """A pager for iterating through ``list_deidentify_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``deidentify_templates`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDeidentifyTemplates`` requests and continue to iterate - through the ``deidentify_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListDeidentifyTemplatesResponse]], - request: dlp.ListDeidentifyTemplatesRequest, - response: dlp.ListDeidentifyTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDeidentifyTemplatesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDeidentifyTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = dlp.ListDeidentifyTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[dlp.ListDeidentifyTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[dlp.DeidentifyTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.deidentify_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTriggersPager: - """A pager for iterating through ``list_job_triggers`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and - provides an ``__iter__`` method to iterate through its - ``job_triggers`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobTriggers`` requests and continue to iterate - through the ``job_triggers`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListJobTriggersResponse], - request: dlp.ListJobTriggersRequest, - response: dlp.ListJobTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListJobTriggersRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListJobTriggersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListJobTriggersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[dlp.ListJobTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[dlp.JobTrigger]: - for page in self.pages: - yield from page.job_triggers - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTriggersAsyncPager: - """A pager for iterating through ``list_job_triggers`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``job_triggers`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobTriggers`` requests and continue to iterate - through the ``job_triggers`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.dlp_v2.types.ListJobTriggersResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListJobTriggersResponse]], - request: dlp.ListJobTriggersRequest, - response: dlp.ListJobTriggersResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListJobTriggersRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListJobTriggersResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListJobTriggersRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[dlp.ListJobTriggersResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[dlp.JobTrigger]: - async def async_generator(): - async for page in self.pages: - for response in page.job_triggers: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDlpJobsPager: - """A pager for iterating through ``list_dlp_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListDlpJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListDlpJobsResponse], - request: dlp.ListDlpJobsRequest, - response: dlp.ListDlpJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDlpJobsRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDlpJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = dlp.ListDlpJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[dlp.ListDlpJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[dlp.DlpJob]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListDlpJobsAsyncPager: - """A pager for iterating through ``list_dlp_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListDlpJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListDlpJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListDlpJobsResponse]], - request: dlp.ListDlpJobsRequest, - response: dlp.ListDlpJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListDlpJobsRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListDlpJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListDlpJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[dlp.ListDlpJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[dlp.DlpJob]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListStoredInfoTypesPager: - """A pager for iterating through ``list_stored_info_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``stored_info_types`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListStoredInfoTypes`` requests and continue to iterate - through the ``stored_info_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., dlp.ListStoredInfoTypesResponse], - request: dlp.ListStoredInfoTypesRequest, - response: dlp.ListStoredInfoTypesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = dlp.ListStoredInfoTypesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[dlp.ListStoredInfoTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[dlp.StoredInfoType]: - for page in self.pages: - yield from page.stored_info_types - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListStoredInfoTypesAsyncPager: - """A pager for iterating through ``list_stored_info_types`` requests. - - This class thinly wraps an initial - :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``stored_info_types`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListStoredInfoTypes`` requests and continue to iterate - through the ``stored_info_types`` field on the - corresponding responses. - - All the usual :class:`google.cloud.dlp_v2.types.ListStoredInfoTypesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[dlp.ListStoredInfoTypesResponse]], - request: dlp.ListStoredInfoTypesRequest, - response: dlp.ListStoredInfoTypesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.dlp_v2.types.ListStoredInfoTypesRequest): - The initial request object. - response (google.cloud.dlp_v2.types.ListStoredInfoTypesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = dlp.ListStoredInfoTypesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[dlp.ListStoredInfoTypesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[dlp.StoredInfoType]: - async def async_generator(): - async for page in self.pages: - for response in page.stored_info_types: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py deleted file mode 100644 index dd85ecf8..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import DlpServiceTransport -from .grpc import DlpServiceGrpcTransport -from .grpc_asyncio import DlpServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[DlpServiceTransport]] -_transport_registry['grpc'] = DlpServiceGrpcTransport -_transport_registry['grpc_asyncio'] = DlpServiceGrpcAsyncIOTransport - -__all__ = ( - 'DlpServiceTransport', - 'DlpServiceGrpcTransport', - 'DlpServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py deleted file mode 100644 index 4abe2c3d..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/base.py +++ /dev/null @@ -1,771 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-dlp', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class DlpServiceTransport(abc.ABC): - """Abstract transport class for DlpService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'dlp.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials is service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.inspect_content: gapic_v1.method.wrap_method( - self.inspect_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.redact_image: gapic_v1.method.wrap_method( - self.redact_image, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.deidentify_content: gapic_v1.method.wrap_method( - self.deidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.reidentify_content: gapic_v1.method.wrap_method( - self.reidentify_content, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_info_types: gapic_v1.method.wrap_method( - self.list_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_inspect_template: 
gapic_v1.method.wrap_method( - self.create_inspect_template, - default_timeout=300.0, - client_info=client_info, - ), - self.update_inspect_template: gapic_v1.method.wrap_method( - self.update_inspect_template, - default_timeout=300.0, - client_info=client_info, - ), - self.get_inspect_template: gapic_v1.method.wrap_method( - self.get_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_inspect_templates: gapic_v1.method.wrap_method( - self.list_inspect_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_inspect_template: gapic_v1.method.wrap_method( - self.delete_inspect_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_deidentify_template: gapic_v1.method.wrap_method( - self.create_deidentify_template, - default_timeout=300.0, - client_info=client_info, - ), - self.update_deidentify_template: gapic_v1.method.wrap_method( - self.update_deidentify_template, - default_timeout=300.0, - client_info=client_info, - ), - self.get_deidentify_template: gapic_v1.method.wrap_method( - self.get_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_deidentify_templates: gapic_v1.method.wrap_method( - self.list_deidentify_templates, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_deidentify_template: gapic_v1.method.wrap_method( - self.delete_deidentify_template, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.create_job_trigger: gapic_v1.method.wrap_method( - self.create_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.update_job_trigger: gapic_v1.method.wrap_method( - self.update_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.hybrid_inspect_job_trigger: gapic_v1.method.wrap_method( - self.hybrid_inspect_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.get_job_trigger: gapic_v1.method.wrap_method( - self.get_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - 
self.list_job_triggers: gapic_v1.method.wrap_method( - self.list_job_triggers, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_job_trigger: gapic_v1.method.wrap_method( - self.delete_job_trigger, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.activate_job_trigger: gapic_v1.method.wrap_method( - self.activate_job_trigger, - default_timeout=300.0, - client_info=client_info, - ), - self.create_dlp_job: gapic_v1.method.wrap_method( - self.create_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.list_dlp_jobs: gapic_v1.method.wrap_method( - self.list_dlp_jobs, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.get_dlp_job: gapic_v1.method.wrap_method( - self.get_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_dlp_job: gapic_v1.method.wrap_method( - self.delete_dlp_job, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.cancel_dlp_job: gapic_v1.method.wrap_method( - self.cancel_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.create_stored_info_type: gapic_v1.method.wrap_method( - self.create_stored_info_type, - default_timeout=300.0, - client_info=client_info, - ), - self.update_stored_info_type: gapic_v1.method.wrap_method( - self.update_stored_info_type, - default_timeout=300.0, - client_info=client_info, - ), - self.get_stored_info_type: gapic_v1.method.wrap_method( - self.get_stored_info_type, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.list_stored_info_types: gapic_v1.method.wrap_method( - self.list_stored_info_types, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.delete_stored_info_type: gapic_v1.method.wrap_method( - self.delete_stored_info_type, - default_retry=retries.Retry( -initial=0.1,maximum=60.0,multiplier=1.3, predicate=retries.if_exception_type( - core_exceptions.DeadlineExceeded, - core_exceptions.ServiceUnavailable, - ), - deadline=300.0, - ), - default_timeout=300.0, - client_info=client_info, - ), - self.hybrid_inspect_dlp_job: 
gapic_v1.method.wrap_method( - self.hybrid_inspect_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - self.finish_dlp_job: gapic_v1.method.wrap_method( - self.finish_dlp_job, - default_timeout=300.0, - client_info=client_info, - ), - } - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - Union[ - dlp.InspectContentResponse, - Awaitable[dlp.InspectContentResponse] - ]]: - raise NotImplementedError() - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - Union[ - dlp.RedactImageResponse, - Awaitable[dlp.RedactImageResponse] - ]]: - raise NotImplementedError() - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - Union[ - dlp.DeidentifyContentResponse, - Awaitable[dlp.DeidentifyContentResponse] - ]]: - raise NotImplementedError() - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - Union[ - dlp.ReidentifyContentResponse, - Awaitable[dlp.ReidentifyContentResponse] - ]]: - raise NotImplementedError() - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - Union[ - dlp.ListInfoTypesResponse, - Awaitable[dlp.ListInfoTypesResponse] - ]]: - raise NotImplementedError() - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - Union[ - dlp.InspectTemplate, - Awaitable[dlp.InspectTemplate] - ]]: - raise NotImplementedError() - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - Union[ - dlp.ListInspectTemplatesResponse, - Awaitable[dlp.ListInspectTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - Union[ - dlp.DeidentifyTemplate, - Awaitable[dlp.DeidentifyTemplate] - ]]: - raise NotImplementedError() - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - Union[ - dlp.ListDeidentifyTemplatesResponse, - Awaitable[dlp.ListDeidentifyTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - 
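# A minimal, self-contained sketch of the default retry policy that
# _prep_wrapped_methods installs above, assuming only that google-api-core
# is available. The `fetch_job` callable is a hypothetical stand-in for any
# unary RPC; every parameter value mirrors the generated configuration.
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries

# Exponential backoff: first delay 0.1s, growing 1.3x per attempt, capped
# at 60s per delay, retrying only transient errors (DeadlineExceeded and
# ServiceUnavailable), with an overall 300s deadline.
default_retry = retries.Retry(
    initial=0.1,
    maximum=60.0,
    multiplier=1.3,
    predicate=retries.if_exception_type(
        core_exceptions.DeadlineExceeded,
        core_exceptions.ServiceUnavailable,
    ),
    deadline=300.0,
)

def fetch_job(request, retry=gapic_v1.method.DEFAULT, timeout=None, metadata=()):
    """Hypothetical unary RPC used only for illustration."""
    raise NotImplementedError()

# wrap_method attaches the retry/timeout defaults and user-agent metadata to
# the callable, which is exactly what _prep_wrapped_methods does per RPC above.
wrapped_fetch_job = gapic_v1.method.wrap_method(
    fetch_job,
    default_retry=default_retry,
    default_timeout=300.0,
    client_info=gapic_v1.client_info.ClientInfo(),
)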
@property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - Union[ - dlp.HybridInspectResponse, - Awaitable[dlp.HybridInspectResponse] - ]]: - raise NotImplementedError() - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - Union[ - dlp.JobTrigger, - Awaitable[dlp.JobTrigger] - ]]: - raise NotImplementedError() - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - Union[ - dlp.ListJobTriggersResponse, - Awaitable[dlp.ListJobTriggersResponse] - ]]: - raise NotImplementedError() - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def create_dlp_job(self) -> Callable[ - [dlp.CreateDlpJobRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def list_dlp_jobs(self) -> Callable[ - [dlp.ListDlpJobsRequest], - Union[ - dlp.ListDlpJobsResponse, - Awaitable[dlp.ListDlpJobsResponse] - ]]: - raise NotImplementedError() - - @property - def get_dlp_job(self) -> Callable[ - [dlp.GetDlpJobRequest], - Union[ - dlp.DlpJob, - Awaitable[dlp.DlpJob] - ]]: - raise NotImplementedError() - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - Union[ - dlp.StoredInfoType, - Awaitable[dlp.StoredInfoType] - ]]: - raise NotImplementedError() - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - Union[ - dlp.ListStoredInfoTypesResponse, - Awaitable[dlp.ListStoredInfoTypesResponse] - ]]: - raise NotImplementedError() - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - Union[ - dlp.HybridInspectResponse, - Awaitable[dlp.HybridInspectResponse] - ]]: - raise NotImplementedError() - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'DlpServiceTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py 
b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py deleted file mode 100644 index 69020a3d..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc.py +++ /dev/null @@ -1,1244 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.protobuf import empty_pb2 # type: ignore -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO - - -class DlpServiceGrpcTransport(DlpServiceTransport): - """gRPC backend transport for DlpService. - - The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in user- - supplied, unstructured data streams, like text blocks or images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'dlp.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. 
- This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the gRPC channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether a self-signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials.
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'dlp.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - dlp.InspectContentResponse]: - r"""Return a callable for the inspect content method over gRPC. - - Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. 
By default this may be all types, but - may change over time as detectors are updated. - For how-to guides, see - https://cloud.google.com/dlp/docs/inspecting-images and - https://cloud.google.com/dlp/docs/inspecting-text. - - Returns: - Callable[[~.InspectContentRequest], - ~.InspectContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'inspect_content' not in self._stubs: - self._stubs['inspect_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/InspectContent', - request_serializer=dlp.InspectContentRequest.serialize, - response_deserializer=dlp.InspectContentResponse.deserialize, - ) - return self._stubs['inspect_content'] - - @property - def redact_image(self) -> Callable[ - [dlp.RedactImageRequest], - dlp.RedactImageResponse]: - r"""Return a callable for the redact image method over gRPC. - - Redacts potentially sensitive info from an image. - This method has limits on input size, processing time, - and output size. See - https://cloud.google.com/dlp/docs/redacting-sensitive-data-images - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.RedactImageRequest], - ~.RedactImageResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'redact_image' not in self._stubs: - self._stubs['redact_image'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/RedactImage', - request_serializer=dlp.RedactImageRequest.serialize, - response_deserializer=dlp.RedactImageResponse.deserialize, - ) - return self._stubs['redact_image'] - - @property - def deidentify_content(self) -> Callable[ - [dlp.DeidentifyContentRequest], - dlp.DeidentifyContentResponse]: - r"""Return a callable for the deidentify content method over gRPC. - - De-identifies potentially sensitive info from a - ContentItem. This method has limits on input size and - output size. See - https://cloud.google.com/dlp/docs/deidentify-sensitive-data - to learn more. - - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. - - Returns: - Callable[[~.DeidentifyContentRequest], - ~.DeidentifyContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each.
- if 'deidentify_content' not in self._stubs: - self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeidentifyContent', - request_serializer=dlp.DeidentifyContentRequest.serialize, - response_deserializer=dlp.DeidentifyContentResponse.deserialize, - ) - return self._stubs['deidentify_content'] - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - dlp.ReidentifyContentResponse]: - r"""Return a callable for the reidentify content method over gRPC. - - Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Returns: - Callable[[~.ReidentifyContentRequest], - ~.ReidentifyContentResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reidentify_content' not in self._stubs: - self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ReidentifyContent', - request_serializer=dlp.ReidentifyContentRequest.serialize, - response_deserializer=dlp.ReidentifyContentResponse.deserialize, - ) - return self._stubs['reidentify_content'] - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - dlp.ListInfoTypesResponse]: - r"""Return a callable for the list info types method over gRPC. - - Returns a list of the sensitive information types - that the DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - Returns: - Callable[[~.ListInfoTypesRequest], - ~.ListInfoTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_info_types' not in self._stubs: - self._stubs['list_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInfoTypes', - request_serializer=dlp.ListInfoTypesRequest.serialize, - response_deserializer=dlp.ListInfoTypesResponse.deserialize, - ) - return self._stubs['list_info_types'] - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the create inspect template method over gRPC. - - Creates an InspectTemplate for re-using frequently - used configuration for inspecting content, images, and - storage. See https://cloud.google.com/dlp/docs/creating- - templates to learn more. - - Returns: - Callable[[~.CreateInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_inspect_template' not in self._stubs: - self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', - request_serializer=dlp.CreateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['create_inspect_template'] - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the update inspect template method over gRPC. - - Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.UpdateInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_inspect_template' not in self._stubs: - self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', - request_serializer=dlp.UpdateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['update_inspect_template'] - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - dlp.InspectTemplate]: - r"""Return a callable for the get inspect template method over gRPC. - - Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.GetInspectTemplateRequest], - ~.InspectTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_inspect_template' not in self._stubs: - self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', - request_serializer=dlp.GetInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['get_inspect_template'] - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - dlp.ListInspectTemplatesResponse]: - r"""Return a callable for the list inspect templates method over gRPC. - - Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.ListInspectTemplatesRequest], - ~.ListInspectTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
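-        # Illustrative sketch (hypothetical usage; ``transport`` and
-        # ``parent`` are assumed names). The raw stub is unpaginated,
-        # so a caller would loop over next_page_token manually:
-        #
-        #   request = dlp.ListInspectTemplatesRequest(parent=parent)
-        #   while True:
-        #       response = transport.list_inspect_templates(request)
-        #       for template in response.inspect_templates:
-        #           print(template.name)
-        #       if not response.next_page_token:
-        #           break
-        #       request.page_token = response.next_page_token
-        #
-        # The higher-level client wraps this callable in a pager instead.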
- if 'list_inspect_templates' not in self._stubs: - self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', - request_serializer=dlp.ListInspectTemplatesRequest.serialize, - response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, - ) - return self._stubs['list_inspect_templates'] - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete inspect template method over gRPC. - - Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.DeleteInspectTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_inspect_template' not in self._stubs: - self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', - request_serializer=dlp.DeleteInspectTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_inspect_template'] - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the create deidentify template method over gRPC. - - Creates a DeidentifyTemplate for re-using frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates- - deid to learn more. - - Returns: - Callable[[~.CreateDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_deidentify_template' not in self._stubs: - self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', - request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['create_deidentify_template'] - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the update deidentify template method over gRPC. - - Updates the DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Returns: - Callable[[~.UpdateDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_deidentify_template' not in self._stubs: - self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', - request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['update_deidentify_template'] - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - dlp.DeidentifyTemplate]: - r"""Return a callable for the get deidentify template method over gRPC. - - Gets a DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Returns: - Callable[[~.GetDeidentifyTemplateRequest], - ~.DeidentifyTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_deidentify_template' not in self._stubs: - self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', - request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['get_deidentify_template'] - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - dlp.ListDeidentifyTemplatesResponse]: - r"""Return a callable for the list deidentify templates method over gRPC. - - Lists DeidentifyTemplates. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Returns: - Callable[[~.ListDeidentifyTemplatesRequest], - ~.ListDeidentifyTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_deidentify_templates' not in self._stubs: - self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', - request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, - response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, - ) - return self._stubs['list_deidentify_templates'] - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete deidentify template method over gRPC. - - Deletes a DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Returns: - Callable[[~.DeleteDeidentifyTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
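-        # Illustrative note (hypothetical usage; ``transport`` and
-        # ``template_name`` are assumed names): delete RPCs return
-        # google.protobuf.Empty, so a successful call yields no payload
-        # and failures surface as gRPC errors:
-        #
-        #   transport.delete_deidentify_template(
-        #       dlp.DeleteDeidentifyTemplateRequest(name=template_name))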
- if 'delete_deidentify_template' not in self._stubs: - self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', - request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_deidentify_template'] - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the create job trigger method over gRPC. - - Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.CreateJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job_trigger' not in self._stubs: - self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', - request_serializer=dlp.CreateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['create_job_trigger'] - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the update job trigger method over gRPC. - - Updates a job trigger. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Returns: - Callable[[~.UpdateJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_job_trigger' not in self._stubs: - self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', - request_serializer=dlp.UpdateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['update_job_trigger'] - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - dlp.HybridInspectResponse]: - r"""Return a callable for the hybrid inspect job trigger method over gRPC. - - Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - Early access feature is in a pre-release state and might - change or have limited support. For more information, - see - https://cloud.google.com/products#product-launch-stages. - - Returns: - Callable[[~.HybridInspectJobTriggerRequest], - ~.HybridInspectResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
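-        # Illustrative note (hypothetical usage; ``transport``,
-        # ``trigger_name``, and ``item`` are assumed names): this RPC
-        # only acknowledges receipt, so a caller submits the item and
-        # then monitors the trigger's jobs for findings:
-        #
-        #   transport.hybrid_inspect_job_trigger(
-        #       dlp.HybridInspectJobTriggerRequest(
-        #           name=trigger_name, hybrid_item=item))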
- if 'hybrid_inspect_job_trigger' not in self._stubs: - self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', - request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_job_trigger'] - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - dlp.JobTrigger]: - r"""Return a callable for the get job trigger method over gRPC. - - Gets a job trigger. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Returns: - Callable[[~.GetJobTriggerRequest], - ~.JobTrigger]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_trigger' not in self._stubs: - self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetJobTrigger', - request_serializer=dlp.GetJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['get_job_trigger'] - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - dlp.ListJobTriggersResponse]: - r"""Return a callable for the list job triggers method over gRPC. - - Lists job triggers. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Returns: - Callable[[~.ListJobTriggersRequest], - ~.ListJobTriggersResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_triggers' not in self._stubs: - self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListJobTriggers', - request_serializer=dlp.ListJobTriggersRequest.serialize, - response_deserializer=dlp.ListJobTriggersResponse.deserialize, - ) - return self._stubs['list_job_triggers'] - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete job trigger method over gRPC. - - Deletes a job trigger. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Returns: - Callable[[~.DeleteJobTriggerRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_trigger' not in self._stubs: - self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', - request_serializer=dlp.DeleteJobTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_trigger'] - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - dlp.DlpJob]: - r"""Return a callable for the activate job trigger method over gRPC. - - Activate a job trigger. 
Causes the immediate execution
-        of a trigger instead of waiting on the trigger event to
-        occur.
-
-        Returns:
-            Callable[[~.ActivateJobTriggerRequest],
-                    ~.DlpJob]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'activate_job_trigger' not in self._stubs:
-            self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary(
-                '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger',
-                request_serializer=dlp.ActivateJobTriggerRequest.serialize,
-                response_deserializer=dlp.DlpJob.deserialize,
-            )
-        return self._stubs['activate_job_trigger']
-
-    @property
-    def create_dlp_job(self) -> Callable[
-            [dlp.CreateDlpJobRequest],
-            dlp.DlpJob]:
-        r"""Return a callable for the create dlp job method over gRPC.
-
-        Creates a new job to inspect storage or calculate
-        risk metrics. See
-        https://cloud.google.com/dlp/docs/inspecting-storage and
-        https://cloud.google.com/dlp/docs/compute-risk-analysis
-        to learn more.
-        When no InfoTypes or CustomInfoTypes are specified in
-        inspect jobs, the system will automatically choose what
-        detectors to run. By default this may be all types, but
-        may change over time as detectors are updated.
-
-        Returns:
-            Callable[[~.CreateDlpJobRequest],
-                    ~.DlpJob]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_dlp_job' not in self._stubs:
-            self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary(
-                '/google.privacy.dlp.v2.DlpService/CreateDlpJob',
-                request_serializer=dlp.CreateDlpJobRequest.serialize,
-                response_deserializer=dlp.DlpJob.deserialize,
-            )
-        return self._stubs['create_dlp_job']
-
-    @property
-    def list_dlp_jobs(self) -> Callable[
-            [dlp.ListDlpJobsRequest],
-            dlp.ListDlpJobsResponse]:
-        r"""Return a callable for the list dlp jobs method over gRPC.
-
-        Lists DlpJobs that match the specified filter in the
-        request. See
-        https://cloud.google.com/dlp/docs/inspecting-storage and
-        https://cloud.google.com/dlp/docs/compute-risk-analysis
-        to learn more.
-
-        Returns:
-            Callable[[~.ListDlpJobsRequest],
-                    ~.ListDlpJobsResponse]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'list_dlp_jobs' not in self._stubs:
-            self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary(
-                '/google.privacy.dlp.v2.DlpService/ListDlpJobs',
-                request_serializer=dlp.ListDlpJobsRequest.serialize,
-                response_deserializer=dlp.ListDlpJobsResponse.deserialize,
-            )
-        return self._stubs['list_dlp_jobs']
-
-    @property
-    def get_dlp_job(self) -> Callable[
-            [dlp.GetDlpJobRequest],
-            dlp.DlpJob]:
-        r"""Return a callable for the get dlp job method over gRPC.
-
-        Gets the latest state of a long-running DlpJob.
-        See https://cloud.google.com/dlp/docs/inspecting-storage
-        and https://cloud.google.com/dlp/docs/compute-risk-
-        analysis to learn more.
-
-        Returns:
-            Callable[[~.GetDlpJobRequest],
-                    ~.DlpJob]:
-                A function that, when called, will call the underlying RPC
-                on the server.
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_dlp_job' not in self._stubs: - self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDlpJob', - request_serializer=dlp.GetDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['get_dlp_job'] - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete dlp job method over gRPC. - - Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be cancelled if possible. - See https://cloud.google.com/dlp/docs/inspecting-storage - and https://cloud.google.com/dlp/docs/compute-risk- - analysis to learn more. - - Returns: - Callable[[~.DeleteDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_dlp_job' not in self._stubs: - self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', - request_serializer=dlp.DeleteDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_dlp_job'] - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the cancel dlp job method over gRPC. - - Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and https://cloud.google.com/dlp/docs/compute-risk- - analysis to learn more. - - Returns: - Callable[[~.CancelDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_dlp_job' not in self._stubs: - self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CancelDlpJob', - request_serializer=dlp.CancelDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_dlp_job'] - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the create stored info type method over gRPC. - - Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Returns: - Callable[[~.CreateStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_stored_info_type' not in self._stubs: - self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', - request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['create_stored_info_type'] - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the update stored info type method over gRPC. - - Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Returns: - Callable[[~.UpdateStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_stored_info_type' not in self._stubs: - self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', - request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['update_stored_info_type'] - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - dlp.StoredInfoType]: - r"""Return a callable for the get stored info type method over gRPC. - - Gets a stored infoType. - See https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Returns: - Callable[[~.GetStoredInfoTypeRequest], - ~.StoredInfoType]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_stored_info_type' not in self._stubs: - self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', - request_serializer=dlp.GetStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['get_stored_info_type'] - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - dlp.ListStoredInfoTypesResponse]: - r"""Return a callable for the list stored info types method over gRPC. - - Lists stored infoTypes. - See https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Returns: - Callable[[~.ListStoredInfoTypesRequest], - ~.ListStoredInfoTypesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_stored_info_types' not in self._stubs: - self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', - request_serializer=dlp.ListStoredInfoTypesRequest.serialize, - response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, - ) - return self._stubs['list_stored_info_types'] - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete stored info type method over gRPC. - - Deletes a stored infoType. - See https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Returns: - Callable[[~.DeleteStoredInfoTypeRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_stored_info_type' not in self._stubs: - self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', - request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_stored_info_type'] - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - dlp.HybridInspectResponse]: - r"""Return a callable for the hybrid inspect dlp job method over gRPC. - - Inspect hybrid content and store findings to a job. - To review the findings inspect the job. Inspection will - occur asynchronously. - Early access feature is in a pre-release state and might - change or have limited support. For more information, - see - https://cloud.google.com/products#product-launch-stages. - - Returns: - Callable[[~.HybridInspectDlpJobRequest], - ~.HybridInspectResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_dlp_job' not in self._stubs: - self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', - request_serializer=dlp.HybridInspectDlpJobRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_dlp_job'] - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the finish dlp job method over gRPC. - - Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. Early access feature is in a pre- - release state and might change or have limited support. - For more information, see - https://cloud.google.com/products#product-launch-stages. - - Returns: - Callable[[~.FinishDlpJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
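-        # Illustrative note (hypothetical usage; ``transport`` and
-        # ``job_name`` are assumed names): for a hybrid job, a caller
-        # sends all hybrid inspect requests first and then finishes the
-        # job to run its remaining actions:
-        #
-        #   transport.finish_dlp_job(dlp.FinishDlpJobRequest(name=job_name))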
- if 'finish_dlp_job' not in self._stubs: - self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/FinishDlpJob', - request_serializer=dlp.FinishDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['finish_dlp_job'] - - -__all__ = ( - 'DlpServiceGrpcTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py b/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py deleted file mode 100644 index 1854e30a..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/services/dlp_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,1248 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.dlp_v2.types import dlp -from google.protobuf import empty_pb2 # type: ignore -from .base import DlpServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import DlpServiceGrpcTransport - - -class DlpServiceGrpcAsyncIOTransport(DlpServiceTransport): - """gRPC AsyncIO backend transport for DlpService. - - The Cloud Data Loss Prevention (DLP) API is a service that - allows clients to detect the presence of Personally Identifiable - Information (PII) and other privacy-sensitive data in user- - supplied, unstructured data streams, like text blocks or images. - The service also includes methods for sensitive data redaction - and scheduling of data scans on Google Cloud Platform based data - sets. - To learn more about concepts and find how-to guides see - https://cloud.google.com/dlp/docs/. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'dlp.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If
-                none are specified, the client will attempt to ascertain
-                the credentials from the environment.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
-            kwargs (Optional[dict]): Keyword arguments, which are passed to the
-                channel creation.
-        Returns:
-            aio.Channel: A gRPC AsyncIO channel object.
-        """
-
-        return grpc_helpers_async.create_channel(
-            host,
-            credentials=credentials,
-            credentials_file=credentials_file,
-            quota_project_id=quota_project_id,
-            default_scopes=cls.AUTH_SCOPES,
-            scopes=scopes,
-            default_host=cls.DEFAULT_HOST,
-            **kwargs
-        )
-
-    def __init__(self, *,
-            host: str = 'dlp.googleapis.com',
-            credentials: ga_credentials.Credentials = None,
-            credentials_file: Optional[str] = None,
-            scopes: Optional[Sequence[str]] = None,
-            channel: aio.Channel = None,
-            api_mtls_endpoint: str = None,
-            client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
-            ssl_channel_credentials: grpc.ChannelCredentials = None,
-            client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
-            quota_project_id=None,
-            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
-            always_use_jwt_access: Optional[bool] = False,
-            ) -> None:
-        """Instantiate the transport.
-
-        Args:
-            host (Optional[str]):
-                The hostname to connect to.
-            credentials (Optional[google.auth.credentials.Credentials]): The
-                authorization credentials to attach to requests. These
-                credentials identify the application to the service; if none
-                are specified, the client will attempt to ascertain the
-                credentials from the environment.
-                This argument is ignored if ``channel`` is provided.
-            credentials_file (Optional[str]): A file with credentials that can
-                be loaded with :func:`google.auth.load_credentials_from_file`.
-                This argument is ignored if ``channel`` is provided.
-            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
-                service. These are only used when credentials are not specified and
-                are passed to :func:`google.auth.default`.
-            channel (Optional[aio.Channel]): A ``Channel`` instance through
-                which to make calls.
-            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
-                If provided, it overrides the ``host`` argument and tries to create
-                a mutual TLS channel with client SSL credentials from
-                ``client_cert_source`` or application default SSL credentials.
-            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                Deprecated. A callback to provide client SSL certificate bytes and
-                private key bytes, both in PEM format. It is ignored if
-                ``api_mtls_endpoint`` is None.
-            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
-                for grpc channel. It is ignored if ``channel`` is provided.
-            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
-                A callback to provide client certificate bytes and private key bytes,
-                both in PEM format. It is used to configure mutual TLS channel. It is
-                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
-            quota_project_id (Optional[str]): An optional project to use for billing
-                and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def inspect_content(self) -> Callable[ - [dlp.InspectContentRequest], - Awaitable[dlp.InspectContentResponse]]: - r"""Return a callable for the inspect content method over gRPC. - - Finds potentially sensitive info in content. - This method has limits on input size, processing time, - and output size. - When no InfoTypes or CustomInfoTypes are specified in - this request, the system will automatically choose what - detectors to run. By default this may be all types, but - may change over time as detectors are updated. 
-        For how-to guides, see
-        https://cloud.google.com/dlp/docs/inspecting-images and
-        https://cloud.google.com/dlp/docs/inspecting-text.
-
-        Returns:
-            Callable[[~.InspectContentRequest],
-                    Awaitable[~.InspectContentResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'inspect_content' not in self._stubs:
-            self._stubs['inspect_content'] = self.grpc_channel.unary_unary(
-                '/google.privacy.dlp.v2.DlpService/InspectContent',
-                request_serializer=dlp.InspectContentRequest.serialize,
-                response_deserializer=dlp.InspectContentResponse.deserialize,
-            )
-        return self._stubs['inspect_content']
-
-    @property
-    def redact_image(self) -> Callable[
-            [dlp.RedactImageRequest],
-            Awaitable[dlp.RedactImageResponse]]:
-        r"""Return a callable for the redact image method over gRPC.
-
-        Redacts potentially sensitive info from an image.
-        This method has limits on input size, processing time,
-        and output size. See
-        https://cloud.google.com/dlp/docs/redacting-sensitive-
-        data-images to learn more.
-
-        When no InfoTypes or CustomInfoTypes are specified in
-        this request, the system will automatically choose what
-        detectors to run. By default this may be all types, but
-        may change over time as detectors are updated.
-
-        Returns:
-            Callable[[~.RedactImageRequest],
-                    Awaitable[~.RedactImageResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'redact_image' not in self._stubs:
-            self._stubs['redact_image'] = self.grpc_channel.unary_unary(
-                '/google.privacy.dlp.v2.DlpService/RedactImage',
-                request_serializer=dlp.RedactImageRequest.serialize,
-                response_deserializer=dlp.RedactImageResponse.deserialize,
-            )
-        return self._stubs['redact_image']
-
-    @property
-    def deidentify_content(self) -> Callable[
-            [dlp.DeidentifyContentRequest],
-            Awaitable[dlp.DeidentifyContentResponse]]:
-        r"""Return a callable for the deidentify content method over gRPC.
-
-        De-identifies potentially sensitive info from a
-        ContentItem. This method has limits on input size and
-        output size. See
-        https://cloud.google.com/dlp/docs/deidentify-sensitive-
-        data to learn more.
-
-        When no InfoTypes or CustomInfoTypes are specified in
-        this request, the system will automatically choose what
-        detectors to run. By default this may be all types, but
-        may change over time as detectors are updated.
-
-        Returns:
-            Callable[[~.DeidentifyContentRequest],
-                    Awaitable[~.DeidentifyContentResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
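-        # Illustrative sketch (hypothetical usage; ``transport`` is an
-        # assumed DlpServiceGrpcAsyncIOTransport instance): on this
-        # transport the callable returns an awaitable, so it is driven
-        # from a coroutine:
-        #
-        #   async def run():
-        #       request = dlp.DeidentifyContentRequest(parent="projects/my-project")
-        #       response = await transport.deidentify_content(request)
-        #
-        # mirroring the synchronous transport's usage.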
- if 'deidentify_content' not in self._stubs: - self._stubs['deidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeidentifyContent', - request_serializer=dlp.DeidentifyContentRequest.serialize, - response_deserializer=dlp.DeidentifyContentResponse.deserialize, - ) - return self._stubs['deidentify_content'] - - @property - def reidentify_content(self) -> Callable[ - [dlp.ReidentifyContentRequest], - Awaitable[dlp.ReidentifyContentResponse]]: - r"""Return a callable for the reidentify content method over gRPC. - - Re-identifies content that has been de-identified. See - https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example - to learn more. - - Returns: - Callable[[~.ReidentifyContentRequest], - Awaitable[~.ReidentifyContentResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'reidentify_content' not in self._stubs: - self._stubs['reidentify_content'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ReidentifyContent', - request_serializer=dlp.ReidentifyContentRequest.serialize, - response_deserializer=dlp.ReidentifyContentResponse.deserialize, - ) - return self._stubs['reidentify_content'] - - @property - def list_info_types(self) -> Callable[ - [dlp.ListInfoTypesRequest], - Awaitable[dlp.ListInfoTypesResponse]]: - r"""Return a callable for the list info types method over gRPC. - - Returns a list of the sensitive information types - that the DLP API supports. See - https://cloud.google.com/dlp/docs/infotypes-reference to - learn more. - - Returns: - Callable[[~.ListInfoTypesRequest], - Awaitable[~.ListInfoTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_info_types' not in self._stubs: - self._stubs['list_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInfoTypes', - request_serializer=dlp.ListInfoTypesRequest.serialize, - response_deserializer=dlp.ListInfoTypesResponse.deserialize, - ) - return self._stubs['list_info_types'] - - @property - def create_inspect_template(self) -> Callable[ - [dlp.CreateInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the create inspect template method over gRPC. - - Creates an InspectTemplate for re-using frequently - used configuration for inspecting content, images, and - storage. See https://cloud.google.com/dlp/docs/creating- - templates to learn more. - - Returns: - Callable[[~.CreateInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_inspect_template' not in self._stubs: - self._stubs['create_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateInspectTemplate', - request_serializer=dlp.CreateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['create_inspect_template'] - - @property - def update_inspect_template(self) -> Callable[ - [dlp.UpdateInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the update inspect template method over gRPC. - - Updates the InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.UpdateInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_inspect_template' not in self._stubs: - self._stubs['update_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateInspectTemplate', - request_serializer=dlp.UpdateInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['update_inspect_template'] - - @property - def get_inspect_template(self) -> Callable[ - [dlp.GetInspectTemplateRequest], - Awaitable[dlp.InspectTemplate]]: - r"""Return a callable for the get inspect template method over gRPC. - - Gets an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.GetInspectTemplateRequest], - Awaitable[~.InspectTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_inspect_template' not in self._stubs: - self._stubs['get_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetInspectTemplate', - request_serializer=dlp.GetInspectTemplateRequest.serialize, - response_deserializer=dlp.InspectTemplate.deserialize, - ) - return self._stubs['get_inspect_template'] - - @property - def list_inspect_templates(self) -> Callable[ - [dlp.ListInspectTemplatesRequest], - Awaitable[dlp.ListInspectTemplatesResponse]]: - r"""Return a callable for the list inspect templates method over gRPC. - - Lists InspectTemplates. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.ListInspectTemplatesRequest], - Awaitable[~.ListInspectTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_inspect_templates' not in self._stubs: - self._stubs['list_inspect_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListInspectTemplates', - request_serializer=dlp.ListInspectTemplatesRequest.serialize, - response_deserializer=dlp.ListInspectTemplatesResponse.deserialize, - ) - return self._stubs['list_inspect_templates'] - - @property - def delete_inspect_template(self) -> Callable[ - [dlp.DeleteInspectTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete inspect template method over gRPC. - - Deletes an InspectTemplate. - See https://cloud.google.com/dlp/docs/creating-templates - to learn more. - - Returns: - Callable[[~.DeleteInspectTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_inspect_template' not in self._stubs: - self._stubs['delete_inspect_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteInspectTemplate', - request_serializer=dlp.DeleteInspectTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_inspect_template'] - - @property - def create_deidentify_template(self) -> Callable[ - [dlp.CreateDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the create deidentify template method over gRPC. - - Creates a DeidentifyTemplate for re-using frequently - used configuration for de-identifying content, images, - and storage. See - https://cloud.google.com/dlp/docs/creating-templates- - deid to learn more. - - Returns: - Callable[[~.CreateDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_deidentify_template' not in self._stubs: - self._stubs['create_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateDeidentifyTemplate', - request_serializer=dlp.CreateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['create_deidentify_template'] - - @property - def update_deidentify_template(self) -> Callable[ - [dlp.UpdateDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the update deidentify template method over gRPC. - - Updates the DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Returns: - Callable[[~.UpdateDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'update_deidentify_template' not in self._stubs: - self._stubs['update_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateDeidentifyTemplate', - request_serializer=dlp.UpdateDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['update_deidentify_template'] - - @property - def get_deidentify_template(self) -> Callable[ - [dlp.GetDeidentifyTemplateRequest], - Awaitable[dlp.DeidentifyTemplate]]: - r"""Return a callable for the get deidentify template method over gRPC. - - Gets a DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Returns: - Callable[[~.GetDeidentifyTemplateRequest], - Awaitable[~.DeidentifyTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_deidentify_template' not in self._stubs: - self._stubs['get_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDeidentifyTemplate', - request_serializer=dlp.GetDeidentifyTemplateRequest.serialize, - response_deserializer=dlp.DeidentifyTemplate.deserialize, - ) - return self._stubs['get_deidentify_template'] - - @property - def list_deidentify_templates(self) -> Callable[ - [dlp.ListDeidentifyTemplatesRequest], - Awaitable[dlp.ListDeidentifyTemplatesResponse]]: - r"""Return a callable for the list deidentify templates method over gRPC. - - Lists DeidentifyTemplates. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Returns: - Callable[[~.ListDeidentifyTemplatesRequest], - Awaitable[~.ListDeidentifyTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_deidentify_templates' not in self._stubs: - self._stubs['list_deidentify_templates'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListDeidentifyTemplates', - request_serializer=dlp.ListDeidentifyTemplatesRequest.serialize, - response_deserializer=dlp.ListDeidentifyTemplatesResponse.deserialize, - ) - return self._stubs['list_deidentify_templates'] - - @property - def delete_deidentify_template(self) -> Callable[ - [dlp.DeleteDeidentifyTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete deidentify template method over gRPC. - - Deletes a DeidentifyTemplate. - See https://cloud.google.com/dlp/docs/creating- - templates-deid to learn more. - - Returns: - Callable[[~.DeleteDeidentifyTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'delete_deidentify_template' not in self._stubs: - self._stubs['delete_deidentify_template'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDeidentifyTemplate', - request_serializer=dlp.DeleteDeidentifyTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_deidentify_template'] - - @property - def create_job_trigger(self) -> Callable[ - [dlp.CreateJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the create job trigger method over gRPC. - - Creates a job trigger to run DLP actions such as - scanning storage for sensitive information on a set - schedule. See - https://cloud.google.com/dlp/docs/creating-job-triggers - to learn more. - - Returns: - Callable[[~.CreateJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job_trigger' not in self._stubs: - self._stubs['create_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateJobTrigger', - request_serializer=dlp.CreateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['create_job_trigger'] - - @property - def update_job_trigger(self) -> Callable[ - [dlp.UpdateJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the update job trigger method over gRPC. - - Updates a job trigger. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Returns: - Callable[[~.UpdateJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_job_trigger' not in self._stubs: - self._stubs['update_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateJobTrigger', - request_serializer=dlp.UpdateJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['update_job_trigger'] - - @property - def hybrid_inspect_job_trigger(self) -> Callable[ - [dlp.HybridInspectJobTriggerRequest], - Awaitable[dlp.HybridInspectResponse]]: - r"""Return a callable for the hybrid inspect job trigger method over gRPC. - - Inspect hybrid content and store findings to a - trigger. The inspection will be processed - asynchronously. To review the findings monitor the jobs - within the trigger. - Early access feature is in a pre-release state and might - change or have limited support. For more information, - see - https://cloud.google.com/products#product-launch-stages. - - Returns: - Callable[[~.HybridInspectJobTriggerRequest], - Awaitable[~.HybridInspectResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'hybrid_inspect_job_trigger' not in self._stubs: - self._stubs['hybrid_inspect_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectJobTrigger', - request_serializer=dlp.HybridInspectJobTriggerRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_job_trigger'] - - @property - def get_job_trigger(self) -> Callable[ - [dlp.GetJobTriggerRequest], - Awaitable[dlp.JobTrigger]]: - r"""Return a callable for the get job trigger method over gRPC. - - Gets a job trigger. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Returns: - Callable[[~.GetJobTriggerRequest], - Awaitable[~.JobTrigger]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_trigger' not in self._stubs: - self._stubs['get_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetJobTrigger', - request_serializer=dlp.GetJobTriggerRequest.serialize, - response_deserializer=dlp.JobTrigger.deserialize, - ) - return self._stubs['get_job_trigger'] - - @property - def list_job_triggers(self) -> Callable[ - [dlp.ListJobTriggersRequest], - Awaitable[dlp.ListJobTriggersResponse]]: - r"""Return a callable for the list job triggers method over gRPC. - - Lists job triggers. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Returns: - Callable[[~.ListJobTriggersRequest], - Awaitable[~.ListJobTriggersResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_triggers' not in self._stubs: - self._stubs['list_job_triggers'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListJobTriggers', - request_serializer=dlp.ListJobTriggersRequest.serialize, - response_deserializer=dlp.ListJobTriggersResponse.deserialize, - ) - return self._stubs['list_job_triggers'] - - @property - def delete_job_trigger(self) -> Callable[ - [dlp.DeleteJobTriggerRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job trigger method over gRPC. - - Deletes a job trigger. - See https://cloud.google.com/dlp/docs/creating-job- - triggers to learn more. - - Returns: - Callable[[~.DeleteJobTriggerRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_trigger' not in self._stubs: - self._stubs['delete_job_trigger'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteJobTrigger', - request_serializer=dlp.DeleteJobTriggerRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_trigger'] - - @property - def activate_job_trigger(self) -> Callable[ - [dlp.ActivateJobTriggerRequest], - Awaitable[dlp.DlpJob]]: - r"""Return a callable for the activate job trigger method over gRPC. - - Activate a job trigger. 
Causes the immediate execution
-        of a trigger instead of waiting on the trigger event to
-        occur.
-
-        Returns:
-            Callable[[~.ActivateJobTriggerRequest],
-                    Awaitable[~.DlpJob]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'activate_job_trigger' not in self._stubs:
-            self._stubs['activate_job_trigger'] = self.grpc_channel.unary_unary(
-                '/google.privacy.dlp.v2.DlpService/ActivateJobTrigger',
-                request_serializer=dlp.ActivateJobTriggerRequest.serialize,
-                response_deserializer=dlp.DlpJob.deserialize,
-            )
-        return self._stubs['activate_job_trigger']
-
-    @property
-    def create_dlp_job(self) -> Callable[
-            [dlp.CreateDlpJobRequest],
-            Awaitable[dlp.DlpJob]]:
-        r"""Return a callable for the create dlp job method over gRPC.
-
-        Creates a new job to inspect storage or calculate
-        risk metrics. See
-        https://cloud.google.com/dlp/docs/inspecting-storage and
-        https://cloud.google.com/dlp/docs/compute-risk-analysis
-        to learn more.
-        When no InfoTypes or CustomInfoTypes are specified in
-        inspect jobs, the system will automatically choose what
-        detectors to run. By default this may be all types, but
-        may change over time as detectors are updated.
-
-        Returns:
-            Callable[[~.CreateDlpJobRequest],
-                    Awaitable[~.DlpJob]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'create_dlp_job' not in self._stubs:
-            self._stubs['create_dlp_job'] = self.grpc_channel.unary_unary(
-                '/google.privacy.dlp.v2.DlpService/CreateDlpJob',
-                request_serializer=dlp.CreateDlpJobRequest.serialize,
-                response_deserializer=dlp.DlpJob.deserialize,
-            )
-        return self._stubs['create_dlp_job']
-
-    @property
-    def list_dlp_jobs(self) -> Callable[
-            [dlp.ListDlpJobsRequest],
-            Awaitable[dlp.ListDlpJobsResponse]]:
-        r"""Return a callable for the list dlp jobs method over gRPC.
-
-        Lists DlpJobs that match the specified filter in the
-        request. See
-        https://cloud.google.com/dlp/docs/inspecting-storage and
-        https://cloud.google.com/dlp/docs/compute-risk-analysis
-        to learn more.
-
-        Returns:
-            Callable[[~.ListDlpJobsRequest],
-                    Awaitable[~.ListDlpJobsResponse]]:
-                A function that, when called, will call the underlying RPC
-                on the server.
-        """
-        # Generate a "stub function" on-the-fly which will actually make
-        # the request.
-        # gRPC handles serialization and deserialization, so we just need
-        # to pass in the functions for each.
-        if 'list_dlp_jobs' not in self._stubs:
-            self._stubs['list_dlp_jobs'] = self.grpc_channel.unary_unary(
-                '/google.privacy.dlp.v2.DlpService/ListDlpJobs',
-                request_serializer=dlp.ListDlpJobsRequest.serialize,
-                response_deserializer=dlp.ListDlpJobsResponse.deserialize,
-            )
-        return self._stubs['list_dlp_jobs']
-
-    @property
-    def get_dlp_job(self) -> Callable[
-            [dlp.GetDlpJobRequest],
-            Awaitable[dlp.DlpJob]]:
-        r"""Return a callable for the get dlp job method over gRPC.
-
-        Gets the latest state of a long-running DlpJob.
-        See https://cloud.google.com/dlp/docs/inspecting-storage
-        and https://cloud.google.com/dlp/docs/compute-risk-analysis
-        to learn more.
- - Returns: - Callable[[~.GetDlpJobRequest], - Awaitable[~.DlpJob]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_dlp_job' not in self._stubs: - self._stubs['get_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetDlpJob', - request_serializer=dlp.GetDlpJobRequest.serialize, - response_deserializer=dlp.DlpJob.deserialize, - ) - return self._stubs['get_dlp_job'] - - @property - def delete_dlp_job(self) -> Callable[ - [dlp.DeleteDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete dlp job method over gRPC. - - Deletes a long-running DlpJob. This method indicates - that the client is no longer interested in the DlpJob - result. The job will be cancelled if possible. - See https://cloud.google.com/dlp/docs/inspecting-storage - and https://cloud.google.com/dlp/docs/compute-risk- - analysis to learn more. - - Returns: - Callable[[~.DeleteDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_dlp_job' not in self._stubs: - self._stubs['delete_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteDlpJob', - request_serializer=dlp.DeleteDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_dlp_job'] - - @property - def cancel_dlp_job(self) -> Callable[ - [dlp.CancelDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the cancel dlp job method over gRPC. - - Starts asynchronous cancellation on a long-running - DlpJob. The server makes a best effort to cancel the - DlpJob, but success is not guaranteed. - See https://cloud.google.com/dlp/docs/inspecting-storage - and https://cloud.google.com/dlp/docs/compute-risk- - analysis to learn more. - - Returns: - Callable[[~.CancelDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'cancel_dlp_job' not in self._stubs: - self._stubs['cancel_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CancelDlpJob', - request_serializer=dlp.CancelDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['cancel_dlp_job'] - - @property - def create_stored_info_type(self) -> Callable[ - [dlp.CreateStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the create stored info type method over gRPC. - - Creates a pre-built stored infoType to be used for - inspection. See - https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Returns: - Callable[[~.CreateStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_stored_info_type' not in self._stubs: - self._stubs['create_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/CreateStoredInfoType', - request_serializer=dlp.CreateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['create_stored_info_type'] - - @property - def update_stored_info_type(self) -> Callable[ - [dlp.UpdateStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the update stored info type method over gRPC. - - Updates the stored infoType by creating a new - version. The existing version will continue to be used - until the new version is ready. See - https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Returns: - Callable[[~.UpdateStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'update_stored_info_type' not in self._stubs: - self._stubs['update_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/UpdateStoredInfoType', - request_serializer=dlp.UpdateStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['update_stored_info_type'] - - @property - def get_stored_info_type(self) -> Callable[ - [dlp.GetStoredInfoTypeRequest], - Awaitable[dlp.StoredInfoType]]: - r"""Return a callable for the get stored info type method over gRPC. - - Gets a stored infoType. - See https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Returns: - Callable[[~.GetStoredInfoTypeRequest], - Awaitable[~.StoredInfoType]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_stored_info_type' not in self._stubs: - self._stubs['get_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/GetStoredInfoType', - request_serializer=dlp.GetStoredInfoTypeRequest.serialize, - response_deserializer=dlp.StoredInfoType.deserialize, - ) - return self._stubs['get_stored_info_type'] - - @property - def list_stored_info_types(self) -> Callable[ - [dlp.ListStoredInfoTypesRequest], - Awaitable[dlp.ListStoredInfoTypesResponse]]: - r"""Return a callable for the list stored info types method over gRPC. - - Lists stored infoTypes. - See https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Returns: - Callable[[~.ListStoredInfoTypesRequest], - Awaitable[~.ListStoredInfoTypesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
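# Each property here hands back a bare gRPC callable; calling it with a
# request and awaiting the result performs the RPC. A minimal sketch of
# driving the asyncio transport directly; the resource name is an
# illustrative placeholder and application-default credentials are assumed:
import asyncio
from google.cloud.dlp_v2.services.dlp_service.transports import (
    DlpServiceGrpcAsyncIOTransport,
)
from google.cloud.dlp_v2.types import dlp

async def fetch_stored_info_type() -> None:
    transport = DlpServiceGrpcAsyncIOTransport()  # default host and credentials
    request = dlp.GetStoredInfoTypeRequest(
        name="projects/example-project/storedInfoTypes/example-stored-type",
    )
    # Accessing the property builds (and caches) the stub; awaiting the
    # call performs the RPC and deserializes the response.
    stored_info_type = await transport.get_stored_info_type(request)
    print(stored_info_type.name)

if __name__ == "__main__":
    asyncio.run(fetch_stored_info_type())  # requires credentials and network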
- if 'list_stored_info_types' not in self._stubs: - self._stubs['list_stored_info_types'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/ListStoredInfoTypes', - request_serializer=dlp.ListStoredInfoTypesRequest.serialize, - response_deserializer=dlp.ListStoredInfoTypesResponse.deserialize, - ) - return self._stubs['list_stored_info_types'] - - @property - def delete_stored_info_type(self) -> Callable[ - [dlp.DeleteStoredInfoTypeRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete stored info type method over gRPC. - - Deletes a stored infoType. - See https://cloud.google.com/dlp/docs/creating-stored- - infotypes to learn more. - - Returns: - Callable[[~.DeleteStoredInfoTypeRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_stored_info_type' not in self._stubs: - self._stubs['delete_stored_info_type'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/DeleteStoredInfoType', - request_serializer=dlp.DeleteStoredInfoTypeRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_stored_info_type'] - - @property - def hybrid_inspect_dlp_job(self) -> Callable[ - [dlp.HybridInspectDlpJobRequest], - Awaitable[dlp.HybridInspectResponse]]: - r"""Return a callable for the hybrid inspect dlp job method over gRPC. - - Inspect hybrid content and store findings to a job. - To review the findings inspect the job. Inspection will - occur asynchronously. - Early access feature is in a pre-release state and might - change or have limited support. For more information, - see - https://cloud.google.com/products#product-launch-stages. - - Returns: - Callable[[~.HybridInspectDlpJobRequest], - Awaitable[~.HybridInspectResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'hybrid_inspect_dlp_job' not in self._stubs: - self._stubs['hybrid_inspect_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/HybridInspectDlpJob', - request_serializer=dlp.HybridInspectDlpJobRequest.serialize, - response_deserializer=dlp.HybridInspectResponse.deserialize, - ) - return self._stubs['hybrid_inspect_dlp_job'] - - @property - def finish_dlp_job(self) -> Callable[ - [dlp.FinishDlpJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the finish dlp job method over gRPC. - - Finish a running hybrid DlpJob. Triggers the - finalization steps and running of any enabled actions - that have not yet run. Early access feature is in a pre- - release state and might change or have limited support. - For more information, see - https://cloud.google.com/products#product-launch-stages. - - Returns: - Callable[[~.FinishDlpJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
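# hybrid_inspect_dlp_job and finish_dlp_job together form the hybrid job
# lifecycle: submit items into a running hybrid job, then finalize it. A
# minimal sketch through the generated async client; the job name is an
# illustrative placeholder and application-default credentials are assumed:
import asyncio
from google.cloud import dlp_v2

async def hybrid_lifecycle(job_name: str) -> None:
    client = dlp_v2.DlpServiceAsyncClient()
    # Send one content item to the running hybrid job.
    await client.hybrid_inspect_dlp_job(
        request=dlp_v2.HybridInspectDlpJobRequest(
            name=job_name,
            hybrid_item=dlp_v2.HybridContentItem(
                item=dlp_v2.ContentItem(value="jane.doe@example.org"),
            ),
        )
    )
    # Once every item has been submitted, trigger finalization and any
    # enabled actions that have not yet run.
    await client.finish_dlp_job(request=dlp_v2.FinishDlpJobRequest(name=job_name))

if __name__ == "__main__":
    # requires an existing hybrid job, credentials, and network access
    asyncio.run(hybrid_lifecycle("projects/example-project/dlpJobs/i-1234567890"))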
- if 'finish_dlp_job' not in self._stubs: - self._stubs['finish_dlp_job'] = self.grpc_channel.unary_unary( - '/google.privacy.dlp.v2.DlpService/FinishDlpJob', - request_serializer=dlp.FinishDlpJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['finish_dlp_job'] - - -__all__ = ( - 'DlpServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py deleted file mode 100644 index 56197758..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/types/__init__.py +++ /dev/null @@ -1,332 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .dlp import ( - Action, - ActivateJobTriggerRequest, - AnalyzeDataSourceRiskDetails, - BoundingBox, - BucketingConfig, - ByteContentItem, - CancelDlpJobRequest, - CharacterMaskConfig, - CharsToIgnore, - Color, - Container, - ContentItem, - ContentLocation, - CreateDeidentifyTemplateRequest, - CreateDlpJobRequest, - CreateInspectTemplateRequest, - CreateJobTriggerRequest, - CreateStoredInfoTypeRequest, - CryptoDeterministicConfig, - CryptoHashConfig, - CryptoKey, - CryptoReplaceFfxFpeConfig, - DateShiftConfig, - DateTime, - DeidentifyConfig, - DeidentifyContentRequest, - DeidentifyContentResponse, - DeidentifyTemplate, - DeleteDeidentifyTemplateRequest, - DeleteDlpJobRequest, - DeleteInspectTemplateRequest, - DeleteJobTriggerRequest, - DeleteStoredInfoTypeRequest, - DlpJob, - DocumentLocation, - Error, - ExcludeInfoTypes, - ExclusionRule, - FieldTransformation, - Finding, - FinishDlpJobRequest, - FixedSizeBucketingConfig, - GetDeidentifyTemplateRequest, - GetDlpJobRequest, - GetInspectTemplateRequest, - GetJobTriggerRequest, - GetStoredInfoTypeRequest, - HybridContentItem, - HybridFindingDetails, - HybridInspectDlpJobRequest, - HybridInspectJobTriggerRequest, - HybridInspectResponse, - HybridInspectStatistics, - ImageLocation, - InfoTypeDescription, - InfoTypeStats, - InfoTypeTransformations, - InspectConfig, - InspectContentRequest, - InspectContentResponse, - InspectDataSourceDetails, - InspectionRule, - InspectionRuleSet, - InspectJobConfig, - InspectResult, - InspectTemplate, - JobTrigger, - KmsWrappedCryptoKey, - LargeCustomDictionaryConfig, - LargeCustomDictionaryStats, - ListDeidentifyTemplatesRequest, - ListDeidentifyTemplatesResponse, - ListDlpJobsRequest, - ListDlpJobsResponse, - ListInfoTypesRequest, - ListInfoTypesResponse, - ListInspectTemplatesRequest, - ListInspectTemplatesResponse, - ListJobTriggersRequest, - ListJobTriggersResponse, - ListStoredInfoTypesRequest, - ListStoredInfoTypesResponse, - Location, - Manual, - MetadataLocation, - OutputStorageConfig, - PrimitiveTransformation, - PrivacyMetric, - QuasiId, - QuoteInfo, - Range, - RecordCondition, - RecordLocation, - RecordSuppression, - RecordTransformations, - RedactConfig, - RedactImageRequest, - RedactImageResponse, - ReidentifyContentRequest, - ReidentifyContentResponse, - 
ReplaceValueConfig, - ReplaceWithInfoTypeConfig, - RiskAnalysisJobConfig, - Schedule, - StatisticalTable, - StorageMetadataLabel, - StoredInfoType, - StoredInfoTypeConfig, - StoredInfoTypeStats, - StoredInfoTypeVersion, - Table, - TableLocation, - TimePartConfig, - TransformationErrorHandling, - TransformationOverview, - TransformationSummary, - TransientCryptoKey, - UnwrappedCryptoKey, - UpdateDeidentifyTemplateRequest, - UpdateInspectTemplateRequest, - UpdateJobTriggerRequest, - UpdateStoredInfoTypeRequest, - Value, - ValueFrequency, - ContentOption, - DlpJobType, - InfoTypeSupportedBy, - MatchingType, - MetadataType, - RelationalOperator, - StoredInfoTypeState, -) -from .storage import ( - BigQueryField, - BigQueryKey, - BigQueryOptions, - BigQueryTable, - CloudStorageFileSet, - CloudStorageOptions, - CloudStoragePath, - CloudStorageRegexFileSet, - CustomInfoType, - DatastoreKey, - DatastoreOptions, - EntityId, - FieldId, - HybridOptions, - InfoType, - Key, - KindExpression, - PartitionId, - RecordKey, - StorageConfig, - StoredType, - TableOptions, - FileType, - Likelihood, -) - -__all__ = ( - 'Action', - 'ActivateJobTriggerRequest', - 'AnalyzeDataSourceRiskDetails', - 'BoundingBox', - 'BucketingConfig', - 'ByteContentItem', - 'CancelDlpJobRequest', - 'CharacterMaskConfig', - 'CharsToIgnore', - 'Color', - 'Container', - 'ContentItem', - 'ContentLocation', - 'CreateDeidentifyTemplateRequest', - 'CreateDlpJobRequest', - 'CreateInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'CreateStoredInfoTypeRequest', - 'CryptoDeterministicConfig', - 'CryptoHashConfig', - 'CryptoKey', - 'CryptoReplaceFfxFpeConfig', - 'DateShiftConfig', - 'DateTime', - 'DeidentifyConfig', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'DeidentifyTemplate', - 'DeleteDeidentifyTemplateRequest', - 'DeleteDlpJobRequest', - 'DeleteInspectTemplateRequest', - 'DeleteJobTriggerRequest', - 'DeleteStoredInfoTypeRequest', - 'DlpJob', - 'DocumentLocation', - 'Error', - 'ExcludeInfoTypes', - 'ExclusionRule', - 'FieldTransformation', - 'Finding', - 'FinishDlpJobRequest', - 'FixedSizeBucketingConfig', - 'GetDeidentifyTemplateRequest', - 'GetDlpJobRequest', - 'GetInspectTemplateRequest', - 'GetJobTriggerRequest', - 'GetStoredInfoTypeRequest', - 'HybridContentItem', - 'HybridFindingDetails', - 'HybridInspectDlpJobRequest', - 'HybridInspectJobTriggerRequest', - 'HybridInspectResponse', - 'HybridInspectStatistics', - 'ImageLocation', - 'InfoTypeDescription', - 'InfoTypeStats', - 'InfoTypeTransformations', - 'InspectConfig', - 'InspectContentRequest', - 'InspectContentResponse', - 'InspectDataSourceDetails', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectJobConfig', - 'InspectResult', - 'InspectTemplate', - 'JobTrigger', - 'KmsWrappedCryptoKey', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 'ListDeidentifyTemplatesRequest', - 'ListDeidentifyTemplatesResponse', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'ListStoredInfoTypesRequest', - 'ListStoredInfoTypesResponse', - 'Location', - 'Manual', - 'MetadataLocation', - 'OutputStorageConfig', - 'PrimitiveTransformation', - 'PrivacyMetric', - 'QuasiId', - 'QuoteInfo', - 'Range', - 'RecordCondition', - 'RecordLocation', - 'RecordSuppression', - 'RecordTransformations', - 'RedactConfig', - 'RedactImageRequest', - 'RedactImageResponse', - 'ReidentifyContentRequest', - 
'ReidentifyContentResponse', - 'ReplaceValueConfig', - 'ReplaceWithInfoTypeConfig', - 'RiskAnalysisJobConfig', - 'Schedule', - 'StatisticalTable', - 'StorageMetadataLabel', - 'StoredInfoType', - 'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'Table', - 'TableLocation', - 'TimePartConfig', - 'TransformationErrorHandling', - 'TransformationOverview', - 'TransformationSummary', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'UpdateDeidentifyTemplateRequest', - 'UpdateInspectTemplateRequest', - 'UpdateJobTriggerRequest', - 'UpdateStoredInfoTypeRequest', - 'Value', - 'ValueFrequency', - 'ContentOption', - 'DlpJobType', - 'InfoTypeSupportedBy', - 'MatchingType', - 'MetadataType', - 'RelationalOperator', - 'StoredInfoTypeState', - 'BigQueryField', - 'BigQueryKey', - 'BigQueryOptions', - 'BigQueryTable', - 'CloudStorageFileSet', - 'CloudStorageOptions', - 'CloudStoragePath', - 'CloudStorageRegexFileSet', - 'CustomInfoType', - 'DatastoreKey', - 'DatastoreOptions', - 'EntityId', - 'FieldId', - 'HybridOptions', - 'InfoType', - 'Key', - 'KindExpression', - 'PartitionId', - 'RecordKey', - 'StorageConfig', - 'StoredType', - 'TableOptions', - 'FileType', - 'Likelihood', -) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py deleted file mode 100644 index d97d7b8c..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/types/dlp.py +++ /dev/null @@ -1,6338 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
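# The deleted module below declares every DLP v2 message in a proto.module
# manifest and then defines each one as a proto.Message subclass whose fields
# carry the .proto field numbers. A minimal, self-contained sketch of that
# proto-plus pattern, mirroring the Range message defined later in the file:
import proto

class Range(proto.Message):
    """Generic half-open interval [start, end)."""
    start = proto.Field(proto.INT64, number=1)
    end = proto.Field(proto.INT64, number=2)

r = Range(start=0, end=4)
data = Range.serialize(r)  # the same serialize/deserialize pair the transports use
assert Range.deserialize(data) == r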
-# -import proto # type: ignore - -from google.cloud.dlp_v2.types import storage -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import empty_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.privacy.dlp.v2', - manifest={ - 'RelationalOperator', - 'MatchingType', - 'ContentOption', - 'MetadataType', - 'InfoTypeSupportedBy', - 'DlpJobType', - 'StoredInfoTypeState', - 'ExcludeInfoTypes', - 'ExclusionRule', - 'InspectionRule', - 'InspectionRuleSet', - 'InspectConfig', - 'ByteContentItem', - 'ContentItem', - 'Table', - 'InspectResult', - 'Finding', - 'Location', - 'ContentLocation', - 'MetadataLocation', - 'StorageMetadataLabel', - 'DocumentLocation', - 'RecordLocation', - 'TableLocation', - 'Container', - 'Range', - 'ImageLocation', - 'BoundingBox', - 'RedactImageRequest', - 'Color', - 'RedactImageResponse', - 'DeidentifyContentRequest', - 'DeidentifyContentResponse', - 'ReidentifyContentRequest', - 'ReidentifyContentResponse', - 'InspectContentRequest', - 'InspectContentResponse', - 'OutputStorageConfig', - 'InfoTypeStats', - 'InspectDataSourceDetails', - 'HybridInspectStatistics', - 'InfoTypeDescription', - 'ListInfoTypesRequest', - 'ListInfoTypesResponse', - 'RiskAnalysisJobConfig', - 'QuasiId', - 'StatisticalTable', - 'PrivacyMetric', - 'AnalyzeDataSourceRiskDetails', - 'ValueFrequency', - 'Value', - 'QuoteInfo', - 'DateTime', - 'DeidentifyConfig', - 'TransformationErrorHandling', - 'PrimitiveTransformation', - 'TimePartConfig', - 'CryptoHashConfig', - 'CryptoDeterministicConfig', - 'ReplaceValueConfig', - 'ReplaceWithInfoTypeConfig', - 'RedactConfig', - 'CharsToIgnore', - 'CharacterMaskConfig', - 'FixedSizeBucketingConfig', - 'BucketingConfig', - 'CryptoReplaceFfxFpeConfig', - 'CryptoKey', - 'TransientCryptoKey', - 'UnwrappedCryptoKey', - 'KmsWrappedCryptoKey', - 'DateShiftConfig', - 'InfoTypeTransformations', - 'FieldTransformation', - 'RecordTransformations', - 'RecordSuppression', - 'RecordCondition', - 'TransformationOverview', - 'TransformationSummary', - 'Schedule', - 'Manual', - 'InspectTemplate', - 'DeidentifyTemplate', - 'Error', - 'JobTrigger', - 'Action', - 'CreateInspectTemplateRequest', - 'UpdateInspectTemplateRequest', - 'GetInspectTemplateRequest', - 'ListInspectTemplatesRequest', - 'ListInspectTemplatesResponse', - 'DeleteInspectTemplateRequest', - 'CreateJobTriggerRequest', - 'ActivateJobTriggerRequest', - 'UpdateJobTriggerRequest', - 'GetJobTriggerRequest', - 'CreateDlpJobRequest', - 'ListJobTriggersRequest', - 'ListJobTriggersResponse', - 'DeleteJobTriggerRequest', - 'InspectJobConfig', - 'DlpJob', - 'GetDlpJobRequest', - 'ListDlpJobsRequest', - 'ListDlpJobsResponse', - 'CancelDlpJobRequest', - 'FinishDlpJobRequest', - 'DeleteDlpJobRequest', - 'CreateDeidentifyTemplateRequest', - 'UpdateDeidentifyTemplateRequest', - 'GetDeidentifyTemplateRequest', - 'ListDeidentifyTemplatesRequest', - 'ListDeidentifyTemplatesResponse', - 'DeleteDeidentifyTemplateRequest', - 'LargeCustomDictionaryConfig', - 'LargeCustomDictionaryStats', - 'StoredInfoTypeConfig', - 'StoredInfoTypeStats', - 'StoredInfoTypeVersion', - 'StoredInfoType', - 'CreateStoredInfoTypeRequest', - 'UpdateStoredInfoTypeRequest', - 
'GetStoredInfoTypeRequest',
-        'ListStoredInfoTypesRequest',
-        'ListStoredInfoTypesResponse',
-        'DeleteStoredInfoTypeRequest',
-        'HybridInspectJobTriggerRequest',
-        'HybridInspectDlpJobRequest',
-        'HybridContentItem',
-        'HybridFindingDetails',
-        'HybridInspectResponse',
-    },
-)
-
-
-class RelationalOperator(proto.Enum):
-    r"""Operators available for comparing the value of fields."""
-    RELATIONAL_OPERATOR_UNSPECIFIED = 0
-    EQUAL_TO = 1
-    NOT_EQUAL_TO = 2
-    GREATER_THAN = 3
-    LESS_THAN = 4
-    GREATER_THAN_OR_EQUALS = 5
-    LESS_THAN_OR_EQUALS = 6
-    EXISTS = 7
-
-
-class MatchingType(proto.Enum):
-    r"""Type of the match which can be applied to different ways of
-    matching, like Dictionary, regular expression and intersecting
-    with findings of another info type.
-    """
-    MATCHING_TYPE_UNSPECIFIED = 0
-    MATCHING_TYPE_FULL_MATCH = 1
-    MATCHING_TYPE_PARTIAL_MATCH = 2
-    MATCHING_TYPE_INVERSE_MATCH = 3
-
-
-class ContentOption(proto.Enum):
-    r"""Options describing which parts of the provided content should
-    be scanned.
-    """
-    CONTENT_UNSPECIFIED = 0
-    CONTENT_TEXT = 1
-    CONTENT_IMAGE = 2
-
-
-class MetadataType(proto.Enum):
-    r"""Type of metadata containing the finding."""
-    METADATATYPE_UNSPECIFIED = 0
-    STORAGE_METADATA = 2
-
-
-class InfoTypeSupportedBy(proto.Enum):
-    r"""Parts of the APIs which use certain infoTypes."""
-    ENUM_TYPE_UNSPECIFIED = 0
-    INSPECT = 1
-    RISK_ANALYSIS = 2
-
-
-class DlpJobType(proto.Enum):
-    r"""An enum to represent the various types of DLP jobs."""
-    DLP_JOB_TYPE_UNSPECIFIED = 0
-    INSPECT_JOB = 1
-    RISK_ANALYSIS_JOB = 2
-
-
-class StoredInfoTypeState(proto.Enum):
-    r"""State of a StoredInfoType version."""
-    STORED_INFO_TYPE_STATE_UNSPECIFIED = 0
-    PENDING = 1
-    READY = 2
-    FAILED = 3
-    INVALID = 4
-
-
-class ExcludeInfoTypes(proto.Message):
-    r"""List of exclude infoTypes.
-    Attributes:
-        info_types (Sequence[google.cloud.dlp_v2.types.InfoType]):
-            InfoType list in ExclusionRule rule drops a finding when it
-            overlaps with, or is contained within, a finding of an
-            infoType from this list. For example, for
-            ``InspectionRuleSet.info_types`` containing
-            ``"PHONE_NUMBER"`` and ``exclusion_rule`` containing
-            ``exclude_info_types.info_types`` with ``"EMAIL_ADDRESS"``,
-            the phone number findings are dropped if they overlap with
-            an EMAIL_ADDRESS finding. As a result,
-            "555-222-2222@example.org" generates only a single finding,
-            namely the email address.
-    """
-
-    info_types = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message=storage.InfoType,
-    )
-
-
-class ExclusionRule(proto.Message):
-    r"""The rule that specifies conditions when findings of infoTypes
-    specified in ``InspectionRuleSet`` are removed from results.
-
-    Attributes:
-        dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary):
-            Dictionary which defines the rule.
-        regex (google.cloud.dlp_v2.types.CustomInfoType.Regex):
-            Regular expression which defines the rule.
-        exclude_info_types (google.cloud.dlp_v2.types.ExcludeInfoTypes):
-            Set of infoTypes for which findings would
-            affect this rule.
-        matching_type (google.cloud.dlp_v2.types.MatchingType):
-            How the rule is applied, see MatchingType
-            documentation for details.
- """ - - dictionary = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.Dictionary, - ) - regex = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=storage.CustomInfoType.Regex, - ) - exclude_info_types = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='ExcludeInfoTypes', - ) - matching_type = proto.Field( - proto.ENUM, - number=4, - enum='MatchingType', - ) - - -class InspectionRule(proto.Message): - r"""A single inspection rule to be applied to infoTypes, specified in - ``InspectionRuleSet``. - - Attributes: - hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): - Hotword-based detection rule. - exclusion_rule (google.cloud.dlp_v2.types.ExclusionRule): - Exclusion rule. - """ - - hotword_rule = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.CustomInfoType.DetectionRule.HotwordRule, - ) - exclusion_rule = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='ExclusionRule', - ) - - -class InspectionRuleSet(proto.Message): - r"""Rule set for modifying a set of infoTypes to alter behavior - under certain circumstances, depending on the specific details - of the rules within the set. - - Attributes: - info_types (Sequence[google.cloud.dlp_v2.types.InfoType]): - List of infoTypes this rule set is applied - to. - rules (Sequence[google.cloud.dlp_v2.types.InspectionRule]): - Set of rules to be applied to infoTypes. The - rules are applied in order. - """ - - info_types = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - rules = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='InspectionRule', - ) - - -class InspectConfig(proto.Message): - r"""Configuration description of the scanning process. When used with - redactContent only info_types and min_likelihood are currently used. - - Attributes: - info_types (Sequence[google.cloud.dlp_v2.types.InfoType]): - Restricts what info_types to look for. The values must - correspond to InfoType values returned by ListInfoTypes or - listed at - https://cloud.google.com/dlp/docs/infotypes-reference. - - When no InfoTypes or CustomInfoTypes are specified in a - request, the system may automatically choose what detectors - to run. By default this may be all types, but may change - over time as detectors are updated. - - If you need precise control and predictability as to what - detectors are run you should specify specific InfoTypes - listed in the reference, otherwise a default list will be - used, which may change over time. - min_likelihood (google.cloud.dlp_v2.types.Likelihood): - Only returns findings equal or above this - threshold. The default is POSSIBLE. - See https://cloud.google.com/dlp/docs/likelihood - to learn more. - limits (google.cloud.dlp_v2.types.InspectConfig.FindingLimits): - Configuration to control the number of - findings returned. - include_quote (bool): - When true, a contextual quote from the data - that triggered a finding is included in the - response; see Finding.quote. - exclude_info_types (bool): - When true, excludes type information of the - findings. - custom_info_types (Sequence[google.cloud.dlp_v2.types.CustomInfoType]): - CustomInfoTypes provided by the user. See - https://cloud.google.com/dlp/docs/creating- - custom-infotypes to learn more. - content_options (Sequence[google.cloud.dlp_v2.types.ContentOption]): - List of options defining data content to - scan. If empty, text, images, and other content - will be included. 
-        rule_set (Sequence[google.cloud.dlp_v2.types.InspectionRuleSet]):
-            Set of rules to apply to the findings for
-            this InspectConfig. Exclusion rules contained in
-            the set are executed last; other rules are
-            executed in the order they are specified for
-            each info type.
-    """
-
-    class FindingLimits(proto.Message):
-        r"""Configuration to control the number of findings returned.
-        Attributes:
-            max_findings_per_item (int):
-                Max number of findings that will be returned for each item
-                scanned. When set within ``InspectJobConfig``, the maximum
-                returned is 2000 regardless of whether this is set higher.
-                When set within ``InspectContentRequest``, this field is
-                ignored.
-            max_findings_per_request (int):
-                Max number of findings that will be returned per
-                request/job. When set within ``InspectContentRequest``, the
-                maximum returned is 2000 regardless of whether this is set
-                higher.
-            max_findings_per_info_type (Sequence[google.cloud.dlp_v2.types.InspectConfig.FindingLimits.InfoTypeLimit]):
-                Configuration of findings limit given for
-                specified infoTypes.
-        """
-
-        class InfoTypeLimit(proto.Message):
-            r"""Max findings configuration per infoType, per content item or
-            long running DlpJob.
-
-            Attributes:
-                info_type (google.cloud.dlp_v2.types.InfoType):
-                    Type of information the findings limit applies to. Only one
-                    limit per info_type should be provided. If InfoTypeLimit
-                    does not have an info_type, the DLP API applies the limit
-                    against all info_types that are found but not specified in
-                    another InfoTypeLimit.
-                max_findings (int):
-                    Max findings limit for the given infoType.
-            """
-
-            info_type = proto.Field(
-                proto.MESSAGE,
-                number=1,
-                message=storage.InfoType,
-            )
-            max_findings = proto.Field(
-                proto.INT32,
-                number=2,
-            )
-
-        max_findings_per_item = proto.Field(
-            proto.INT32,
-            number=1,
-        )
-        max_findings_per_request = proto.Field(
-            proto.INT32,
-            number=2,
-        )
-        max_findings_per_info_type = proto.RepeatedField(
-            proto.MESSAGE,
-            number=3,
-            message='InspectConfig.FindingLimits.InfoTypeLimit',
-        )
-
-    info_types = proto.RepeatedField(
-        proto.MESSAGE,
-        number=1,
-        message=storage.InfoType,
-    )
-    min_likelihood = proto.Field(
-        proto.ENUM,
-        number=2,
-        enum=storage.Likelihood,
-    )
-    limits = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message=FindingLimits,
-    )
-    include_quote = proto.Field(
-        proto.BOOL,
-        number=4,
-    )
-    exclude_info_types = proto.Field(
-        proto.BOOL,
-        number=5,
-    )
-    custom_info_types = proto.RepeatedField(
-        proto.MESSAGE,
-        number=6,
-        message=storage.CustomInfoType,
-    )
-    content_options = proto.RepeatedField(
-        proto.ENUM,
-        number=8,
-        enum='ContentOption',
-    )
-    rule_set = proto.RepeatedField(
-        proto.MESSAGE,
-        number=10,
-        message='InspectionRuleSet',
-    )
-
-
-class ByteContentItem(proto.Message):
-    r"""Container for bytes to inspect or redact.
-    Attributes:
-        type_ (google.cloud.dlp_v2.types.ByteContentItem.BytesType):
-            The type of data stored in the bytes string. Default will be
-            TEXT_UTF8.
-        data (bytes):
-            Content data to inspect or redact.
-    """
-    class BytesType(proto.Enum):
-        r"""The type of data being sent for inspection."""
-        BYTES_TYPE_UNSPECIFIED = 0
-        IMAGE = 6
-        IMAGE_JPEG = 1
-        IMAGE_BMP = 2
-        IMAGE_PNG = 3
-        IMAGE_SVG = 4
-        TEXT_UTF8 = 5
-        WORD_DOCUMENT = 7
-        PDF = 8
-        AVRO = 11
-        CSV = 12
-        TSV = 13
-
-    type_ = proto.Field(
-        proto.ENUM,
-        number=1,
-        enum=BytesType,
-    )
-    data = proto.Field(
-        proto.BYTES,
-        number=2,
-    )
-
-
-class ContentItem(proto.Message):
-    r"""Container structure for the content to inspect.
- Attributes: - value (str): - String data to inspect or redact. - table (google.cloud.dlp_v2.types.Table): - Structured content for inspection. See - https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table - to learn more. - byte_item (google.cloud.dlp_v2.types.ByteContentItem): - Content data to inspect or redact. Replaces ``type`` and - ``data``. - """ - - value = proto.Field( - proto.STRING, - number=3, - oneof='data_item', - ) - table = proto.Field( - proto.MESSAGE, - number=4, - oneof='data_item', - message='Table', - ) - byte_item = proto.Field( - proto.MESSAGE, - number=5, - oneof='data_item', - message='ByteContentItem', - ) - - -class Table(proto.Message): - r"""Structured content to inspect. Up to 50,000 ``Value``\ s per request - allowed. See - https://cloud.google.com/dlp/docs/inspecting-text#inspecting_a_table - to learn more. - - Attributes: - headers (Sequence[google.cloud.dlp_v2.types.FieldId]): - Headers of the table. - rows (Sequence[google.cloud.dlp_v2.types.Table.Row]): - Rows of the table. - """ - - class Row(proto.Message): - r"""Values of the row. - Attributes: - values (Sequence[google.cloud.dlp_v2.types.Value]): - Individual cells. - """ - - values = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - - headers = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - rows = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=Row, - ) - - -class InspectResult(proto.Message): - r"""All the findings for a single scanned item. - Attributes: - findings (Sequence[google.cloud.dlp_v2.types.Finding]): - List of findings for an item. - findings_truncated (bool): - If true, then this item might have more - findings than were returned, and the findings - returned are an arbitrary subset of all - findings. The findings list might be truncated - because the input items were too large, or - because the server reached the maximum amount of - resources allowed for a single API call. For - best results, divide the input into smaller - batches. - """ - - findings = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Finding', - ) - findings_truncated = proto.Field( - proto.BOOL, - number=2, - ) - - -class Finding(proto.Message): - r"""Represents a piece of potentially sensitive content. - Attributes: - name (str): - Resource name in format - projects/{project}/locations/{location}/findings/{finding} - Populated only when viewing persisted findings. - quote (str): - The content that was found. Even if the content is not - textual, it may be converted to a textual representation - here. Provided if ``include_quote`` is true and the finding - is less than or equal to 4096 bytes long. If the finding - exceeds 4096 bytes in length, the quote may be omitted. - info_type (google.cloud.dlp_v2.types.InfoType): - The type of content that might have been found. Provided if - ``excluded_types`` is false. - likelihood (google.cloud.dlp_v2.types.Likelihood): - Confidence of how likely it is that the ``info_type`` is - correct. - location (google.cloud.dlp_v2.types.Location): - Where the content was found. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp when finding was detected. - quote_info (google.cloud.dlp_v2.types.QuoteInfo): - Contains data parsed from quotes. Only populated if - include_quote was set to true and a supported infoType was - requested. Currently supported infoTypes: DATE, - DATE_OF_BIRTH and TIME. - resource_name (str): - The job that stored the finding. 
- trigger_name (str): - Job trigger name, if applicable, for this - finding. - labels (Sequence[google.cloud.dlp_v2.types.Finding.LabelsEntry]): - The labels associated with this ``Finding``. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. - - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - job_create_time (google.protobuf.timestamp_pb2.Timestamp): - Time the job started that produced this - finding. - job_name (str): - The job that stored the finding. - """ - - name = proto.Field( - proto.STRING, - number=14, - ) - quote = proto.Field( - proto.STRING, - number=1, - ) - info_type = proto.Field( - proto.MESSAGE, - number=2, - message=storage.InfoType, - ) - likelihood = proto.Field( - proto.ENUM, - number=3, - enum=storage.Likelihood, - ) - location = proto.Field( - proto.MESSAGE, - number=4, - message='Location', - ) - create_time = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - quote_info = proto.Field( - proto.MESSAGE, - number=7, - message='QuoteInfo', - ) - resource_name = proto.Field( - proto.STRING, - number=8, - ) - trigger_name = proto.Field( - proto.STRING, - number=9, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=10, - ) - job_create_time = proto.Field( - proto.MESSAGE, - number=11, - message=timestamp_pb2.Timestamp, - ) - job_name = proto.Field( - proto.STRING, - number=13, - ) - - -class Location(proto.Message): - r"""Specifies the location of the finding. - Attributes: - byte_range (google.cloud.dlp_v2.types.Range): - Zero-based byte offsets delimiting the - finding. These are relative to the finding's - containing element. Note that when the content - is not textual, this references the UTF-8 - encoded textual representation of the content. - Omitted if content is an image. - codepoint_range (google.cloud.dlp_v2.types.Range): - Unicode character offsets delimiting the - finding. These are relative to the finding's - containing element. Provided when the content is - text. - content_locations (Sequence[google.cloud.dlp_v2.types.ContentLocation]): - List of nested objects pointing to the - precise location of the finding within the file - or record. - container (google.cloud.dlp_v2.types.Container): - Information about the container where this - finding occurred, if available. - """ - - byte_range = proto.Field( - proto.MESSAGE, - number=1, - message='Range', - ) - codepoint_range = proto.Field( - proto.MESSAGE, - number=2, - message='Range', - ) - content_locations = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='ContentLocation', - ) - container = proto.Field( - proto.MESSAGE, - number=8, - message='Container', - ) - - -class ContentLocation(proto.Message): - r"""Precise location of the finding within a document, record, - image, or metadata container. - - Attributes: - container_name (str): - Name of the container where the finding is located. The top - level name is the source file name or table name. 
Names of
-            some common storage containers are formatted as follows:
-
-            -  BigQuery tables: ``{project_id}:{dataset_id}.{table_id}``
-            -  Cloud Storage files: ``gs://{bucket}/{path}``
-            -  Datastore namespace: {namespace}
-
-            Nested names could be absent if the embedded object has no
-            string identifier (for example, an image contained within a
-            document).
-        record_location (google.cloud.dlp_v2.types.RecordLocation):
-            Location within a row or record of a database
-            table.
-        image_location (google.cloud.dlp_v2.types.ImageLocation):
-            Location within an image's pixels.
-        document_location (google.cloud.dlp_v2.types.DocumentLocation):
-            Location data for document files.
-        metadata_location (google.cloud.dlp_v2.types.MetadataLocation):
-            Location within the metadata for inspected
-            content.
-        container_timestamp (google.protobuf.timestamp_pb2.Timestamp):
-            Findings container modification timestamp, if applicable.
-            For Google Cloud Storage, contains the last file
-            modification timestamp. For a BigQuery table, contains the
-            last_modified_time property. For Datastore, not populated.
-        container_version (str):
-            Findings container version, if available
-            ("generation" for Google Cloud Storage).
-    """
-
-    container_name = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    record_location = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        oneof='location',
-        message='RecordLocation',
-    )
-    image_location = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        oneof='location',
-        message='ImageLocation',
-    )
-    document_location = proto.Field(
-        proto.MESSAGE,
-        number=5,
-        oneof='location',
-        message='DocumentLocation',
-    )
-    metadata_location = proto.Field(
-        proto.MESSAGE,
-        number=8,
-        oneof='location',
-        message='MetadataLocation',
-    )
-    container_timestamp = proto.Field(
-        proto.MESSAGE,
-        number=6,
-        message=timestamp_pb2.Timestamp,
-    )
-    container_version = proto.Field(
-        proto.STRING,
-        number=7,
-    )
-
-
-class MetadataLocation(proto.Message):
-    r"""Metadata Location
-    Attributes:
-        type_ (google.cloud.dlp_v2.types.MetadataType):
-            Type of metadata containing the finding.
-        storage_label (google.cloud.dlp_v2.types.StorageMetadataLabel):
-            Storage metadata.
-    """
-
-    type_ = proto.Field(
-        proto.ENUM,
-        number=1,
-        enum='MetadataType',
-    )
-    storage_label = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        oneof='label',
-        message='StorageMetadataLabel',
-    )
-
-
-class StorageMetadataLabel(proto.Message):
-    r"""Storage metadata label to indicate which metadata entry
-    contains findings.
-
-    Attributes:
-        key (str):
-
-    """
-
-    key = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-
-
-class DocumentLocation(proto.Message):
-    r"""Location of a finding within a document.
-    Attributes:
-        file_offset (int):
-            Offset of the line, from the beginning of the
-            file, where the finding is located.
-    """
-
-    file_offset = proto.Field(
-        proto.INT64,
-        number=1,
-    )
-
-
-class RecordLocation(proto.Message):
-    r"""Location of a finding within a row or record.
-    Attributes:
-        record_key (google.cloud.dlp_v2.types.RecordKey):
-            Key of the finding.
-        field_id (google.cloud.dlp_v2.types.FieldId):
-            Field id of the field containing the finding.
-        table_location (google.cloud.dlp_v2.types.TableLocation):
-            Location within a ``ContentItem.Table``.
- """ - - record_key = proto.Field( - proto.MESSAGE, - number=1, - message=storage.RecordKey, - ) - field_id = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - table_location = proto.Field( - proto.MESSAGE, - number=3, - message='TableLocation', - ) - - -class TableLocation(proto.Message): - r"""Location of a finding within a table. - Attributes: - row_index (int): - The zero-based index of the row where the finding is - located. Only populated for resources that have a natural - ordering, not BigQuery. In BigQuery, to identify the row a - finding came from, populate - BigQueryOptions.identifying_fields with your primary key - column names and when you store the findings the value of - those columns will be stored inside of Finding. - """ - - row_index = proto.Field( - proto.INT64, - number=1, - ) - - -class Container(proto.Message): - r"""Represents a container that may contain DLP findings. - Examples of a container include a file, table, or database - record. - - Attributes: - type_ (str): - Container type, for example BigQuery or - Google Cloud Storage. - project_id (str): - Project where the finding was found. - Can be different from the project that owns the - finding. - full_path (str): - A string representation of the full container - name. Examples: - - BigQuery: 'Project:DataSetId.TableId' - - Google Cloud Storage: - 'gs://Bucket/folders/filename.txt' - root_path (str): - The root of the container. Examples: - - - For BigQuery table ``project_id:dataset_id.table_id``, - the root is ``dataset_id`` - - For Google Cloud Storage file - ``gs://bucket/folder/filename.txt``, the root is - ``gs://bucket`` - relative_path (str): - The rest of the path after the root. Examples: - - - For BigQuery table ``project_id:dataset_id.table_id``, - the relative path is ``table_id`` - - Google Cloud Storage file - ``gs://bucket/folder/filename.txt``, the relative path is - ``folder/filename.txt`` - update_time (google.protobuf.timestamp_pb2.Timestamp): - Findings container modification timestamp, if applicable. - For Google Cloud Storage contains last file modification - timestamp. For BigQuery table contains last_modified_time - property. For Datastore - not populated. - version (str): - Findings container version, if available - ("generation" for Google Cloud Storage). - """ - - type_ = proto.Field( - proto.STRING, - number=1, - ) - project_id = proto.Field( - proto.STRING, - number=2, - ) - full_path = proto.Field( - proto.STRING, - number=3, - ) - root_path = proto.Field( - proto.STRING, - number=4, - ) - relative_path = proto.Field( - proto.STRING, - number=5, - ) - update_time = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - version = proto.Field( - proto.STRING, - number=7, - ) - - -class Range(proto.Message): - r"""Generic half-open interval [start, end) - Attributes: - start (int): - Index of the first character of the range - (inclusive). - end (int): - Index of the last character of the range - (exclusive). - """ - - start = proto.Field( - proto.INT64, - number=1, - ) - end = proto.Field( - proto.INT64, - number=2, - ) - - -class ImageLocation(proto.Message): - r"""Location of the finding within an image. - Attributes: - bounding_boxes (Sequence[google.cloud.dlp_v2.types.BoundingBox]): - Bounding boxes locating the pixels within the - image containing the finding. 
- """ - - bounding_boxes = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='BoundingBox', - ) - - -class BoundingBox(proto.Message): - r"""Bounding box encompassing detected text within an image. - Attributes: - top (int): - Top coordinate of the bounding box. (0,0) is - upper left. - left (int): - Left coordinate of the bounding box. (0,0) is - upper left. - width (int): - Width of the bounding box in pixels. - height (int): - Height of the bounding box in pixels. - """ - - top = proto.Field( - proto.INT32, - number=1, - ) - left = proto.Field( - proto.INT32, - number=2, - ) - width = proto.Field( - proto.INT32, - number=3, - ) - height = proto.Field( - proto.INT32, - number=4, - ) - - -class RedactImageRequest(proto.Message): - r"""Request to search for potentially sensitive info in an image - and redact it by covering it with a colored rectangle. - - Attributes: - parent (str): - Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - location_id (str): - Deprecated. This field has no effect. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. - image_redaction_configs (Sequence[google.cloud.dlp_v2.types.RedactImageRequest.ImageRedactionConfig]): - The configuration for specifying what content - to redact from images. - include_findings (bool): - Whether the response should include findings - along with the redacted image. - byte_item (google.cloud.dlp_v2.types.ByteContentItem): - The content must be PNG, JPEG, SVG or BMP. - """ - - class ImageRedactionConfig(proto.Message): - r"""Configuration for determining how redaction of images should - occur. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Only one per info_type should be provided per request. If - not specified, and redact_all_text is false, the DLP API - will redact all text that it matches against all info_types - that are found, but not specified in another - ImageRedactionConfig. - redact_all_text (bool): - If true, all text found in the image, regardless whether it - matches an info_type, is redacted. Only one should be - provided. - redaction_color (google.cloud.dlp_v2.types.Color): - The color to use when redacting content from - an image. If not specified, the default is - black. 
- """ - - info_type = proto.Field( - proto.MESSAGE, - number=1, - oneof='target', - message=storage.InfoType, - ) - redact_all_text = proto.Field( - proto.BOOL, - number=2, - oneof='target', - ) - redaction_color = proto.Field( - proto.MESSAGE, - number=3, - message='Color', - ) - - parent = proto.Field( - proto.STRING, - number=1, - ) - location_id = proto.Field( - proto.STRING, - number=8, - ) - inspect_config = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - image_redaction_configs = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=ImageRedactionConfig, - ) - include_findings = proto.Field( - proto.BOOL, - number=6, - ) - byte_item = proto.Field( - proto.MESSAGE, - number=7, - message='ByteContentItem', - ) - - -class Color(proto.Message): - r"""Represents a color in the RGB color space. - Attributes: - red (float): - The amount of red in the color as a value in the interval - [0, 1]. - green (float): - The amount of green in the color as a value in the interval - [0, 1]. - blue (float): - The amount of blue in the color as a value in the interval - [0, 1]. - """ - - red = proto.Field( - proto.FLOAT, - number=1, - ) - green = proto.Field( - proto.FLOAT, - number=2, - ) - blue = proto.Field( - proto.FLOAT, - number=3, - ) - - -class RedactImageResponse(proto.Message): - r"""Results of redacting an image. - Attributes: - redacted_image (bytes): - The redacted image. The type will be the same - as the original image. - extracted_text (str): - If an image was being inspected and the InspectConfig's - include_quote was set to true, then this field will include - all text, if any, that was found in the image. - inspect_result (google.cloud.dlp_v2.types.InspectResult): - The findings. Populated when include_findings in the request - is true. - """ - - redacted_image = proto.Field( - proto.BYTES, - number=1, - ) - extracted_text = proto.Field( - proto.STRING, - number=2, - ) - inspect_result = proto.Field( - proto.MESSAGE, - number=3, - message='InspectResult', - ) - - -class DeidentifyContentRequest(proto.Message): - r"""Request to de-identify a list of items. - Attributes: - parent (str): - Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - Configuration for the de-identification of the content item. - Items specified here will override the template referenced - by the deidentify_template_name argument. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. Items specified here will - override the template referenced by the - inspect_template_name argument. - item (google.cloud.dlp_v2.types.ContentItem): - The item to de-identify. Will be treated as - text. - inspect_template_name (str): - Template to use. Any configuration directly specified in - inspect_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. 
Singular sub-messages and groups are - recursively merged. - deidentify_template_name (str): - Template to use. Any configuration directly specified in - deidentify_config will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - location_id (str): - Deprecated. This field has no effect. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - deidentify_config = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyConfig', - ) - inspect_config = proto.Field( - proto.MESSAGE, - number=3, - message='InspectConfig', - ) - item = proto.Field( - proto.MESSAGE, - number=4, - message='ContentItem', - ) - inspect_template_name = proto.Field( - proto.STRING, - number=5, - ) - deidentify_template_name = proto.Field( - proto.STRING, - number=6, - ) - location_id = proto.Field( - proto.STRING, - number=7, - ) - - -class DeidentifyContentResponse(proto.Message): - r"""Results of de-identifying a ContentItem. - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The de-identified item. - overview (google.cloud.dlp_v2.types.TransformationOverview): - An overview of the changes that were made on the ``item``. - """ - - item = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - overview = proto.Field( - proto.MESSAGE, - number=2, - message='TransformationOverview', - ) - - -class ReidentifyContentRequest(proto.Message): - r"""Request to re-identify an item. - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - reidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - Configuration for the re-identification of the content item. - This field shares the same proto message type that is used - for de-identification, however its usage here is for the - reversal of the previous de-identification. - Re-identification is performed by examining the - transformations used to de-identify the items and executing - the reverse. This requires that only reversible - transformations be provided here. The reversible - transformations are: - - - ``CryptoDeterministicConfig`` - - ``CryptoReplaceFfxFpeConfig`` - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - Configuration for the inspector. - item (google.cloud.dlp_v2.types.ContentItem): - The item to re-identify. Will be treated as - text. - inspect_template_name (str): - Template to use. Any configuration directly specified in - ``inspect_config`` will override those set in the template. - Singular fields that are set in this request will replace - their corresponding fields in the template. Repeated fields - are appended. Singular sub-messages and groups are - recursively merged. - reidentify_template_name (str): - Template to use. References an instance of - ``DeidentifyTemplate``. 
Any configuration directly specified
-            in ``reidentify_config`` or ``inspect_config`` will override
-            those set in the template. The ``DeidentifyTemplate`` used
-            must include only reversible transformations. Singular
-            fields that are set in this request will replace their
-            corresponding fields in the template. Repeated fields are
-            appended. Singular sub-messages and groups are recursively
-            merged.
-        location_id (str):
-            Deprecated. This field has no effect.
-    """
-
-    parent = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    reidentify_config = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message='DeidentifyConfig',
-    )
-    inspect_config = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message='InspectConfig',
-    )
-    item = proto.Field(
-        proto.MESSAGE,
-        number=4,
-        message='ContentItem',
-    )
-    inspect_template_name = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-    reidentify_template_name = proto.Field(
-        proto.STRING,
-        number=6,
-    )
-    location_id = proto.Field(
-        proto.STRING,
-        number=7,
-    )
-
-
-class ReidentifyContentResponse(proto.Message):
-    r"""Results of re-identifying an item.
-    Attributes:
-        item (google.cloud.dlp_v2.types.ContentItem):
-            The re-identified item.
-        overview (google.cloud.dlp_v2.types.TransformationOverview):
-            An overview of the changes that were made to the ``item``.
-    """
-
-    item = proto.Field(
-        proto.MESSAGE,
-        number=1,
-        message='ContentItem',
-    )
-    overview = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message='TransformationOverview',
-    )
-
-
-class InspectContentRequest(proto.Message):
-    r"""Request to search for potentially sensitive info in a
-    ContentItem.
-
-    Attributes:
-        parent (str):
-            Parent resource name.
-
-            The format of this value varies depending on whether you
-            have `specified a processing
-            location <https://cloud.google.com/dlp/docs/specifying-location>`__:
-
-            -  Projects scope, location specified:
-               ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID
-            -  Projects scope, no location specified (defaults to
-               global): ``projects/``\ PROJECT_ID
-
-            The following example ``parent`` string specifies a parent
-            project with the identifier ``example-project``, and
-            specifies the ``europe-west3`` location for processing data:
-
-            ::
-
-                parent=projects/example-project/locations/europe-west3
-        inspect_config (google.cloud.dlp_v2.types.InspectConfig):
-            Configuration for the inspector. What is specified here will
-            override the template referenced by the
-            inspect_template_name argument.
-        item (google.cloud.dlp_v2.types.ContentItem):
-            The item to inspect.
-        inspect_template_name (str):
-            Template to use. Any configuration directly specified in
-            inspect_config will override those set in the template.
-            Singular fields that are set in this request will replace
-            their corresponding fields in the template. Repeated fields
-            are appended. Singular sub-messages and groups are
-            recursively merged.
-        location_id (str):
-            Deprecated. This field has no effect.
-    """
-
-    parent = proto.Field(
-        proto.STRING,
-        number=1,
-    )
-    inspect_config = proto.Field(
-        proto.MESSAGE,
-        number=2,
-        message='InspectConfig',
-    )
-    item = proto.Field(
-        proto.MESSAGE,
-        number=3,
-        message='ContentItem',
-    )
-    inspect_template_name = proto.Field(
-        proto.STRING,
-        number=4,
-    )
-    location_id = proto.Field(
-        proto.STRING,
-        number=5,
-    )
-
-
-class InspectContentResponse(proto.Message):
-    r"""Results of inspecting an item.
-    Attributes:
-        result (google.cloud.dlp_v2.types.InspectResult):
-            The findings.
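As a reading aid for ``InspectContentRequest``/``InspectContentResponse`` above, a minimal inline inspection sketch; the parent string reuses the docstring's ``example-project``/``europe-west3`` values and default credentials are assumed::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    response = client.inspect_content(
        request={
            "parent": "projects/example-project/locations/europe-west3",
            "inspect_config": {
                "info_types": [{"name": "PHONE_NUMBER"}],
                "include_quote": True,  # findings then carry the matched text
            },
            "item": {"value": "My phone number is 206-555-0123"},
        }
    )
    for finding in response.result.findings:
        print(finding.info_type.name, finding.likelihood, finding.quote)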
- """ - - result = proto.Field( - proto.MESSAGE, - number=1, - message='InspectResult', - ) - - -class OutputStorageConfig(proto.Message): - r"""Cloud repository for storing output. - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Store findings in an existing table or a new table in an - existing dataset. If table_id is not set a new one will be - generated for you with the following format: - dlp_googleapis_yyyy_mm_dd_[dlp_job_id]. Pacific timezone - will be used for generating the date details. - - For Inspect, each column in an existing output table must - have the same name, type, and mode of a field in the - ``Finding`` object. - - For Risk, an existing output table should be the output of a - previous Risk analysis job run on the same source table, - with the same privacy metric and quasi-identifiers. Risk - jobs that analyze the same table but compute a different - privacy metric, or use different sets of quasi-identifiers, - cannot store their results in the same table. - output_schema (google.cloud.dlp_v2.types.OutputStorageConfig.OutputSchema): - Schema used for writing the findings for Inspect jobs. This - field is only used for Inspect and must be unspecified for - Risk jobs. Columns are derived from the ``Finding`` object. - If appending to an existing table, any columns from the - predefined schema that are missing will be added. No columns - in the existing table will be deleted. - - If unspecified, then all available columns will be used for - a new table or an (existing) table with no schema, and no - changes will be made to an existing table that has a schema. - Only for use with external storage. - """ - class OutputSchema(proto.Enum): - r"""Predefined schemas for storing findings. - Only for use with external storage. - """ - OUTPUT_SCHEMA_UNSPECIFIED = 0 - BASIC_COLUMNS = 1 - GCS_COLUMNS = 2 - DATASTORE_COLUMNS = 3 - BIG_QUERY_COLUMNS = 4 - ALL_COLUMNS = 5 - - table = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=storage.BigQueryTable, - ) - output_schema = proto.Field( - proto.ENUM, - number=3, - enum=OutputSchema, - ) - - -class InfoTypeStats(proto.Message): - r"""Statistics regarding a specific InfoType. - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - The type of finding this stat is for. - count (int): - Number of findings for this infoType. - """ - - info_type = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - count = proto.Field( - proto.INT64, - number=2, - ) - - -class InspectDataSourceDetails(proto.Message): - r"""The results of an inspect DataSource job. - Attributes: - requested_options (google.cloud.dlp_v2.types.InspectDataSourceDetails.RequestedOptions): - The configuration used for this job. - result (google.cloud.dlp_v2.types.InspectDataSourceDetails.Result): - A summary of the outcome of this inspect job. - """ - - class RequestedOptions(proto.Message): - r"""Snapshot of the inspection configuration. - Attributes: - snapshot_inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - If run with an InspectTemplate, a snapshot of - its state at the time of this run. - job_config (google.cloud.dlp_v2.types.InspectJobConfig): - Inspect config. - """ - - snapshot_inspect_template = proto.Field( - proto.MESSAGE, - number=1, - message='InspectTemplate', - ) - job_config = proto.Field( - proto.MESSAGE, - number=3, - message='InspectJobConfig', - ) - - class Result(proto.Message): - r"""All result fields mentioned below are updated while the job - is processing. 
- - Attributes: - processed_bytes (int): - Total size in bytes that were processed. - total_estimated_bytes (int): - Estimate of the number of bytes to process. - info_type_stats (Sequence[google.cloud.dlp_v2.types.InfoTypeStats]): - Statistics of how many instances of each info - type were found during inspect job. - hybrid_stats (google.cloud.dlp_v2.types.HybridInspectStatistics): - Statistics related to the processing of - hybrid inspect. Early access feature is in a - pre-release state and might change or have - limited support. For more information, see - https://cloud.google.com/products#product- - launch-stages. - """ - - processed_bytes = proto.Field( - proto.INT64, - number=1, - ) - total_estimated_bytes = proto.Field( - proto.INT64, - number=2, - ) - info_type_stats = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='InfoTypeStats', - ) - hybrid_stats = proto.Field( - proto.MESSAGE, - number=7, - message='HybridInspectStatistics', - ) - - requested_options = proto.Field( - proto.MESSAGE, - number=2, - message=RequestedOptions, - ) - result = proto.Field( - proto.MESSAGE, - number=3, - message=Result, - ) - - -class HybridInspectStatistics(proto.Message): - r"""Statistics related to processing hybrid inspect requests. - Attributes: - processed_count (int): - The number of hybrid inspection requests - processed within this job. - aborted_count (int): - The number of hybrid inspection requests - aborted because the job ran out of quota or was - ended before they could be processed. - pending_count (int): - The number of hybrid requests currently being processed. - Only populated when called via method ``getDlpJob``. A burst - of traffic may cause hybrid inspect requests to be enqueued. - Processing will take place as quickly as possible, but - resource limitations may impact how long a request is - enqueued for. - """ - - processed_count = proto.Field( - proto.INT64, - number=1, - ) - aborted_count = proto.Field( - proto.INT64, - number=2, - ) - pending_count = proto.Field( - proto.INT64, - number=3, - ) - - -class InfoTypeDescription(proto.Message): - r"""InfoType description. - Attributes: - name (str): - Internal name of the infoType. - display_name (str): - Human readable form of the infoType name. - supported_by (Sequence[google.cloud.dlp_v2.types.InfoTypeSupportedBy]): - Which parts of the API supports this - InfoType. - description (str): - Description of the infotype. Translated when - language is provided in the request. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - display_name = proto.Field( - proto.STRING, - number=2, - ) - supported_by = proto.RepeatedField( - proto.ENUM, - number=3, - enum='InfoTypeSupportedBy', - ) - description = proto.Field( - proto.STRING, - number=4, - ) - - -class ListInfoTypesRequest(proto.Message): - r"""Request for the list of infoTypes. - Attributes: - parent (str): - The parent resource name. - - The format of this value is as follows: - - :: - - locations/LOCATION_ID - language_code (str): - BCP-47 language code for localized infoType - friendly names. If omitted, or if localized - strings are not available, en-US strings will be - returned. - filter (str): - filter to only return infoTypes supported by certain parts - of the API. Defaults to supported_by=INSPECT. - location_id (str): - Deprecated. This field has no effect. 
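A minimal ``list_info_types`` sketch built from the request fields above; the filter value mirrors the documented default::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    response = client.list_info_types(
        request={
            "parent": "locations/global",
            "language_code": "en-US",
            "filter": "supported_by=INSPECT",
        }
    )
    for info_type in response.info_types:
        print(info_type.name, "-", info_type.display_name)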
- """ - - parent = proto.Field( - proto.STRING, - number=4, - ) - language_code = proto.Field( - proto.STRING, - number=1, - ) - filter = proto.Field( - proto.STRING, - number=2, - ) - location_id = proto.Field( - proto.STRING, - number=3, - ) - - -class ListInfoTypesResponse(proto.Message): - r"""Response to the ListInfoTypes request. - Attributes: - info_types (Sequence[google.cloud.dlp_v2.types.InfoTypeDescription]): - Set of sensitive infoTypes. - """ - - info_types = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InfoTypeDescription', - ) - - -class RiskAnalysisJobConfig(proto.Message): - r"""Configuration for a risk analysis job. See - https://cloud.google.com/dlp/docs/concepts-risk-analysis to - learn more. - - Attributes: - privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): - Privacy metric to compute. - source_table (google.cloud.dlp_v2.types.BigQueryTable): - Input dataset to compute metrics over. - actions (Sequence[google.cloud.dlp_v2.types.Action]): - Actions to execute at the completion of the - job. Are executed in the order provided. - """ - - privacy_metric = proto.Field( - proto.MESSAGE, - number=1, - message='PrivacyMetric', - ) - source_table = proto.Field( - proto.MESSAGE, - number=2, - message=storage.BigQueryTable, - ) - actions = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='Action', - ) - - -class QuasiId(proto.Message): - r"""A column with a semantic tag attached. - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Identifies the column. - info_type (google.cloud.dlp_v2.types.InfoType): - A column can be tagged with a InfoType to use the relevant - public dataset as a statistical model of population, if - available. We currently support US ZIP codes, region codes, - ages and genders. To programmatically obtain the list of - supported InfoTypes, use ListInfoTypes with the - supported_by=RISK_ANALYSIS filter. - custom_tag (str): - A column can be tagged with a custom tag. In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - inferred (google.protobuf.empty_pb2.Empty): - If no semantic tag is indicated, we infer the - statistical model from the distribution of - values in the input data - """ - - field = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - info_type = proto.Field( - proto.MESSAGE, - number=2, - oneof='tag', - message=storage.InfoType, - ) - custom_tag = proto.Field( - proto.STRING, - number=3, - oneof='tag', - ) - inferred = proto.Field( - proto.MESSAGE, - number=4, - oneof='tag', - message=empty_pb2.Empty, - ) - - -class StatisticalTable(proto.Message): - r"""An auxiliary table containing statistical information on the - relative frequency of different quasi-identifiers values. It has - one or several quasi-identifiers columns, and one column that - indicates the relative frequency of each quasi-identifier tuple. - If a tuple is present in the data but not in the auxiliary - table, the corresponding relative frequency is assumed to be - zero (and thus, the tuple is highly reidentifiable). - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Required. Auxiliary table location. - quasi_ids (Sequence[google.cloud.dlp_v2.types.StatisticalTable.QuasiIdentifierField]): - Required. Quasi-identifier columns. - relative_frequency (google.cloud.dlp_v2.types.FieldId): - Required. 
The relative frequency column must - contain a floating-point number between 0 and 1 - (inclusive). Null values are assumed to be zero. - """ - - class QuasiIdentifierField(proto.Message): - r"""A quasi-identifier column has a custom_tag, used to know which - column in the data corresponds to which column in the statistical - model. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Identifies the column. - custom_tag (str): - A column can be tagged with a custom tag. In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). - """ - - field = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - custom_tag = proto.Field( - proto.STRING, - number=2, - ) - - table = proto.Field( - proto.MESSAGE, - number=3, - message=storage.BigQueryTable, - ) - quasi_ids = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=QuasiIdentifierField, - ) - relative_frequency = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - -class PrivacyMetric(proto.Message): - r"""Privacy metric to compute for reidentification risk analysis. - Attributes: - numerical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.NumericalStatsConfig): - Numerical stats - categorical_stats_config (google.cloud.dlp_v2.types.PrivacyMetric.CategoricalStatsConfig): - Categorical stats - k_anonymity_config (google.cloud.dlp_v2.types.PrivacyMetric.KAnonymityConfig): - K-anonymity - l_diversity_config (google.cloud.dlp_v2.types.PrivacyMetric.LDiversityConfig): - l-diversity - k_map_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig): - k-map - delta_presence_estimation_config (google.cloud.dlp_v2.types.PrivacyMetric.DeltaPresenceEstimationConfig): - delta-presence - """ - - class NumericalStatsConfig(proto.Message): - r"""Compute numerical stats over an individual column, including - min, max, and quantiles. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Field to compute numerical stats on. - Supported types are integer, float, date, - datetime, timestamp, time. - """ - - field = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - - class CategoricalStatsConfig(proto.Message): - r"""Compute numerical stats over an individual column, including - number of distinct values and value count distribution. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Field to compute categorical stats on. All - column types are supported except for arrays and - structs. However, it may be more informative to - use NumericalStats when the field type is - supported, depending on the data. - """ - - field = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - - class KAnonymityConfig(proto.Message): - r"""k-anonymity metric, used for analysis of reidentification - risk. - - Attributes: - quasi_ids (Sequence[google.cloud.dlp_v2.types.FieldId]): - Set of fields to compute k-anonymity over. - When multiple fields are specified, they are - considered a single composite key. Structs and - repeated data types are not supported; however, - nested fields are supported so long as they are - not structs themselves or nested within a - repeated field. - entity_id (google.cloud.dlp_v2.types.EntityId): - Message indicating that multiple rows might be associated to - a single individual. 
If the same entity_id is associated to - multiple quasi-identifier tuples over distinct rows, we - consider the entire collection of tuples as the composite - quasi-identifier. This collection is a multiset: the order - in which the different tuples appear in the dataset is - ignored, but their frequency is taken into account. - - Important note: a maximum of 1000 rows can be associated to - a single entity ID. If more rows are associated with the - same entity ID, some might be ignored. - """ - - quasi_ids = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - entity_id = proto.Field( - proto.MESSAGE, - number=2, - message=storage.EntityId, - ) - - class LDiversityConfig(proto.Message): - r"""l-diversity metric, used for analysis of reidentification - risk. - - Attributes: - quasi_ids (Sequence[google.cloud.dlp_v2.types.FieldId]): - Set of quasi-identifiers indicating how - equivalence classes are defined for the - l-diversity computation. When multiple fields - are specified, they are considered a single - composite key. - sensitive_attribute (google.cloud.dlp_v2.types.FieldId): - Sensitive field for computing the l-value. - """ - - quasi_ids = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - sensitive_attribute = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - class KMapEstimationConfig(proto.Message): - r"""Reidentifiability metric. This corresponds to a risk model - similar to what is called "journalist risk" in the literature, - except the attack dataset is statistically modeled instead of - being perfectly known. This can be done using publicly available - data (like the US Census), or using a custom statistical model - (indicated as one or several BigQuery tables), or by - extrapolating from the distribution of values in the input - dataset. - - Attributes: - quasi_ids (Sequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.TaggedField]): - Required. Fields considered to be quasi- - dentifiers. No two columns can have the same - tag. - region_code (str): - ISO 3166-1 alpha-2 region code to use in the statistical - modeling. Set if no column is tagged with a region-specific - InfoType (like US_ZIP_5) or a region code. - auxiliary_tables (Sequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable]): - Several auxiliary tables can be used in the analysis. Each - custom_tag used to tag a quasi-identifiers column must - appear in exactly one column of one auxiliary table. - """ - - class TaggedField(proto.Message): - r"""A column with a semantic tag attached. - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Identifies the column. - info_type (google.cloud.dlp_v2.types.InfoType): - A column can be tagged with a InfoType to use the relevant - public dataset as a statistical model of population, if - available. We currently support US ZIP codes, region codes, - ages and genders. To programmatically obtain the list of - supported InfoTypes, use ListInfoTypes with the - supported_by=RISK_ANALYSIS filter. - custom_tag (str): - A column can be tagged with a custom tag. In - this case, the user must indicate an auxiliary - table that contains statistical information on - the possible values of this column (below). 
- inferred (google.protobuf.empty_pb2.Empty): - If no semantic tag is indicated, we infer the - statistical model from the distribution of - values in the input data - """ - - field = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - info_type = proto.Field( - proto.MESSAGE, - number=2, - oneof='tag', - message=storage.InfoType, - ) - custom_tag = proto.Field( - proto.STRING, - number=3, - oneof='tag', - ) - inferred = proto.Field( - proto.MESSAGE, - number=4, - oneof='tag', - message=empty_pb2.Empty, - ) - - class AuxiliaryTable(proto.Message): - r"""An auxiliary table contains statistical information on the - relative frequency of different quasi-identifiers values. It has - one or several quasi-identifiers columns, and one column that - indicates the relative frequency of each quasi-identifier tuple. - If a tuple is present in the data but not in the auxiliary - table, the corresponding relative frequency is assumed to be - zero (and thus, the tuple is highly reidentifiable). - - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Required. Auxiliary table location. - quasi_ids (Sequence[google.cloud.dlp_v2.types.PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField]): - Required. Quasi-identifier columns. - relative_frequency (google.cloud.dlp_v2.types.FieldId): - Required. The relative frequency column must - contain a floating-point number between 0 and 1 - (inclusive). Null values are assumed to be zero. - """ - - class QuasiIdField(proto.Message): - r"""A quasi-identifier column has a custom_tag, used to know which - column in the data corresponds to which column in the statistical - model. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Identifies the column. - custom_tag (str): - A auxiliary field. - """ - - field = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - custom_tag = proto.Field( - proto.STRING, - number=2, - ) - - table = proto.Field( - proto.MESSAGE, - number=3, - message=storage.BigQueryTable, - ) - quasi_ids = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable.QuasiIdField', - ) - relative_frequency = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - - quasi_ids = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='PrivacyMetric.KMapEstimationConfig.TaggedField', - ) - region_code = proto.Field( - proto.STRING, - number=2, - ) - auxiliary_tables = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='PrivacyMetric.KMapEstimationConfig.AuxiliaryTable', - ) - - class DeltaPresenceEstimationConfig(proto.Message): - r"""δ-presence metric, used to estimate how likely it is for an - attacker to figure out that one given individual appears in a - de-identified dataset. Similarly to the k-map metric, we cannot - compute δ-presence exactly without knowing the attack dataset, - so we use a statistical model instead. - - Attributes: - quasi_ids (Sequence[google.cloud.dlp_v2.types.QuasiId]): - Required. Fields considered to be quasi- - dentifiers. No two fields can have the same tag. - region_code (str): - ISO 3166-1 alpha-2 region code to use in the statistical - modeling. Set if no column is tagged with a region-specific - InfoType (like US_ZIP_5) or a region code. - auxiliary_tables (Sequence[google.cloud.dlp_v2.types.StatisticalTable]): - Several auxiliary tables can be used in the analysis. 
Each - custom_tag used to tag a quasi-identifiers field must appear - in exactly one field of one auxiliary table. - """ - - quasi_ids = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='QuasiId', - ) - region_code = proto.Field( - proto.STRING, - number=2, - ) - auxiliary_tables = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='StatisticalTable', - ) - - numerical_stats_config = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message=NumericalStatsConfig, - ) - categorical_stats_config = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=CategoricalStatsConfig, - ) - k_anonymity_config = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message=KAnonymityConfig, - ) - l_diversity_config = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=LDiversityConfig, - ) - k_map_estimation_config = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=KMapEstimationConfig, - ) - delta_presence_estimation_config = proto.Field( - proto.MESSAGE, - number=6, - oneof='type', - message=DeltaPresenceEstimationConfig, - ) - - -class AnalyzeDataSourceRiskDetails(proto.Message): - r"""Result of a risk analysis operation request. - Attributes: - requested_privacy_metric (google.cloud.dlp_v2.types.PrivacyMetric): - Privacy metric to compute. - requested_source_table (google.cloud.dlp_v2.types.BigQueryTable): - Input dataset to compute metrics over. - numerical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.NumericalStatsResult): - Numerical stats result - categorical_stats_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult): - Categorical stats result - k_anonymity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult): - K-anonymity result - l_diversity_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult): - L-divesity result - k_map_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult): - K-map result - delta_presence_estimation_result (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult): - Delta-presence result - requested_options (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.RequestedRiskAnalysisOptions): - The configuration used for this job. - """ - - class NumericalStatsResult(proto.Message): - r"""Result of the numerical stats computation. - Attributes: - min_value (google.cloud.dlp_v2.types.Value): - Minimum value appearing in the column. - max_value (google.cloud.dlp_v2.types.Value): - Maximum value appearing in the column. - quantile_values (Sequence[google.cloud.dlp_v2.types.Value]): - List of 99 values that partition the set of - field values into 100 equal sized buckets. - """ - - min_value = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - max_value = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - quantile_values = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Value', - ) - - class CategoricalStatsResult(proto.Message): - r"""Result of the categorical stats computation. - Attributes: - value_frequency_histogram_buckets (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket]): - Histogram of value frequencies in the column. - """ - - class CategoricalStatsHistogramBucket(proto.Message): - r"""Histogram of value frequencies in the column. 
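Pulling the risk-analysis pieces above together, a sketch of submitting a k-anonymity job; the source table and the ``zip_code``/``birth_year`` quasi-identifier columns are hypothetical::

    from google.cloud import dlp_v2

    risk_job = dlp_v2.RiskAnalysisJobConfig(
        privacy_metric=dlp_v2.PrivacyMetric(
            k_anonymity_config=dlp_v2.PrivacyMetric.KAnonymityConfig(
                quasi_ids=[
                    dlp_v2.FieldId(name="zip_code"),
                    dlp_v2.FieldId(name="birth_year"),
                ],
            ),
        ),
        source_table=dlp_v2.BigQueryTable(
            project_id="example-project",
            dataset_id="hr",
            table_id="employees",
        ),
    )
    client = dlp_v2.DlpServiceClient()
    job = client.create_dlp_job(
        request={"parent": "projects/example-project", "risk_job": risk_job}
    )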
- Attributes: - value_frequency_lower_bound (int): - Lower bound on the value frequency of the - values in this bucket. - value_frequency_upper_bound (int): - Upper bound on the value frequency of the - values in this bucket. - bucket_size (int): - Total number of values in this bucket. - bucket_values (Sequence[google.cloud.dlp_v2.types.ValueFrequency]): - Sample of value frequencies in this bucket. - The total number of values returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct values in this - bucket. - """ - - value_frequency_lower_bound = proto.Field( - proto.INT64, - number=1, - ) - value_frequency_upper_bound = proto.Field( - proto.INT64, - number=2, - ) - bucket_size = proto.Field( - proto.INT64, - number=3, - ) - bucket_values = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='ValueFrequency', - ) - bucket_value_count = proto.Field( - proto.INT64, - number=5, - ) - - value_frequency_histogram_buckets = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.CategoricalStatsResult.CategoricalStatsHistogramBucket', - ) - - class KAnonymityResult(proto.Message): - r"""Result of the k-anonymity computation. - Attributes: - equivalence_class_histogram_buckets (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket]): - Histogram of k-anonymity equivalence classes. - """ - - class KAnonymityEquivalenceClass(proto.Message): - r"""The set of columns' values that share the same ldiversity - value - - Attributes: - quasi_ids_values (Sequence[google.cloud.dlp_v2.types.Value]): - Set of values defining the equivalence class. - One value per quasi-identifier column in the - original KAnonymity metric message. The order is - always the same as the original request. - equivalence_class_size (int): - Size of the equivalence class, for example - number of rows with the above set of values. - """ - - quasi_ids_values = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - equivalence_class_size = proto.Field( - proto.INT64, - number=2, - ) - - class KAnonymityHistogramBucket(proto.Message): - r"""Histogram of k-anonymity equivalence classes. - Attributes: - equivalence_class_size_lower_bound (int): - Lower bound on the size of the equivalence - classes in this bucket. - equivalence_class_size_upper_bound (int): - Upper bound on the size of the equivalence - classes in this bucket. - bucket_size (int): - Total number of equivalence classes in this - bucket. - bucket_values (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass]): - Sample of equivalence classes in this bucket. - The total number of classes returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct equivalence classes - in this bucket. 
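A sketch of reading the k-anonymity histogram back once the job finishes; the job name is hypothetical::

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    job = client.get_dlp_job(
        request={"name": "projects/example-project/dlpJobs/r-1234567890"}
    )
    result = job.risk_details.k_anonymity_result
    for bucket in result.equivalence_class_histogram_buckets:
        print(
            "class sizes",
            bucket.equivalence_class_size_lower_bound,
            "to",
            bucket.equivalence_class_size_upper_bound,
            "->",
            bucket.bucket_size,
            "classes",
        )
        for cls in bucket.bucket_values:  # sample is capped at 20 per bucket
            print("  equivalence class of", cls.equivalence_class_size, "rows")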
- """ - - equivalence_class_size_lower_bound = proto.Field( - proto.INT64, - number=1, - ) - equivalence_class_size_upper_bound = proto.Field( - proto.INT64, - number=2, - ) - bucket_size = proto.Field( - proto.INT64, - number=3, - ) - bucket_values = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityEquivalenceClass', - ) - bucket_value_count = proto.Field( - proto.INT64, - number=5, - ) - - equivalence_class_histogram_buckets = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.KAnonymityResult.KAnonymityHistogramBucket', - ) - - class LDiversityResult(proto.Message): - r"""Result of the l-diversity computation. - Attributes: - sensitive_value_frequency_histogram_buckets (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket]): - Histogram of l-diversity equivalence class - sensitive value frequencies. - """ - - class LDiversityEquivalenceClass(proto.Message): - r"""The set of columns' values that share the same ldiversity - value. - - Attributes: - quasi_ids_values (Sequence[google.cloud.dlp_v2.types.Value]): - Quasi-identifier values defining the - k-anonymity equivalence class. The order is - always the same as the original request. - equivalence_class_size (int): - Size of the k-anonymity equivalence class. - num_distinct_sensitive_values (int): - Number of distinct sensitive values in this - equivalence class. - top_sensitive_values (Sequence[google.cloud.dlp_v2.types.ValueFrequency]): - Estimated frequencies of top sensitive - values. - """ - - quasi_ids_values = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - equivalence_class_size = proto.Field( - proto.INT64, - number=2, - ) - num_distinct_sensitive_values = proto.Field( - proto.INT64, - number=3, - ) - top_sensitive_values = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='ValueFrequency', - ) - - class LDiversityHistogramBucket(proto.Message): - r"""Histogram of l-diversity equivalence class sensitive value - frequencies. - - Attributes: - sensitive_value_frequency_lower_bound (int): - Lower bound on the sensitive value - frequencies of the equivalence classes in this - bucket. - sensitive_value_frequency_upper_bound (int): - Upper bound on the sensitive value - frequencies of the equivalence classes in this - bucket. - bucket_size (int): - Total number of equivalence classes in this - bucket. - bucket_values (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass]): - Sample of equivalence classes in this bucket. - The total number of classes returned per bucket - is capped at 20. - bucket_value_count (int): - Total number of distinct equivalence classes - in this bucket. 
- """ - - sensitive_value_frequency_lower_bound = proto.Field( - proto.INT64, - number=1, - ) - sensitive_value_frequency_upper_bound = proto.Field( - proto.INT64, - number=2, - ) - bucket_size = proto.Field( - proto.INT64, - number=3, - ) - bucket_values = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityEquivalenceClass', - ) - bucket_value_count = proto.Field( - proto.INT64, - number=5, - ) - - sensitive_value_frequency_histogram_buckets = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='AnalyzeDataSourceRiskDetails.LDiversityResult.LDiversityHistogramBucket', - ) - - class KMapEstimationResult(proto.Message): - r"""Result of the reidentifiability analysis. Note that these - results are an estimation, not exact values. - - Attributes: - k_map_estimation_histogram (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket]): - The intervals [min_anonymity, max_anonymity] do not overlap. - If a value doesn't correspond to any such interval, the - associated frequency is zero. For example, the following - records: {min_anonymity: 1, max_anonymity: 1, frequency: 17} - {min_anonymity: 2, max_anonymity: 3, frequency: 42} - {min_anonymity: 5, max_anonymity: 10, frequency: 99} mean - that there are no record with an estimated anonymity of 4, - 5, or larger than 10. - """ - - class KMapEstimationQuasiIdValues(proto.Message): - r"""A tuple of values for the quasi-identifier columns. - Attributes: - quasi_ids_values (Sequence[google.cloud.dlp_v2.types.Value]): - The quasi-identifier values. - estimated_anonymity (int): - The estimated anonymity for these quasi- - dentifier values. - """ - - quasi_ids_values = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - estimated_anonymity = proto.Field( - proto.INT64, - number=2, - ) - - class KMapEstimationHistogramBucket(proto.Message): - r"""A KMapEstimationHistogramBucket message with the following values: - min_anonymity: 3 max_anonymity: 5 frequency: 42 means that there are - 42 records whose quasi-identifier values correspond to 3, 4 or 5 - people in the overlying population. An important particular case is - when min_anonymity = max_anonymity = 1: the frequency field then - corresponds to the number of uniquely identifiable records. - - Attributes: - min_anonymity (int): - Always positive. - max_anonymity (int): - Always greater than or equal to min_anonymity. - bucket_size (int): - Number of records within these anonymity - bounds. - bucket_values (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues]): - Sample of quasi-identifier tuple values in - this bucket. The total number of classes - returned per bucket is capped at 20. - bucket_value_count (int): - Total number of distinct quasi-identifier - tuple values in this bucket. 
- """ - - min_anonymity = proto.Field( - proto.INT64, - number=1, - ) - max_anonymity = proto.Field( - proto.INT64, - number=2, - ) - bucket_size = proto.Field( - proto.INT64, - number=5, - ) - bucket_values = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationQuasiIdValues', - ) - bucket_value_count = proto.Field( - proto.INT64, - number=7, - ) - - k_map_estimation_histogram = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AnalyzeDataSourceRiskDetails.KMapEstimationResult.KMapEstimationHistogramBucket', - ) - - class DeltaPresenceEstimationResult(proto.Message): - r"""Result of the δ-presence computation. Note that these results - are an estimation, not exact values. - - Attributes: - delta_presence_estimation_histogram (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket]): - The intervals [min_probability, max_probability) do not - overlap. If a value doesn't correspond to any such interval, - the associated frequency is zero. For example, the following - records: {min_probability: 0, max_probability: 0.1, - frequency: 17} {min_probability: 0.2, max_probability: 0.3, - frequency: 42} {min_probability: 0.3, max_probability: 0.4, - frequency: 99} mean that there are no record with an - estimated probability in [0.1, 0.2) nor larger or equal to - 0.4. - """ - - class DeltaPresenceEstimationQuasiIdValues(proto.Message): - r"""A tuple of values for the quasi-identifier columns. - Attributes: - quasi_ids_values (Sequence[google.cloud.dlp_v2.types.Value]): - The quasi-identifier values. - estimated_probability (float): - The estimated probability that a given individual sharing - these quasi-identifier values is in the dataset. This value, - typically called δ, is the ratio between the number of - records in the dataset with these quasi-identifier values, - and the total number of individuals (inside *and* outside - the dataset) with these quasi-identifier values. For - example, if there are 15 individuals in the dataset who - share the same quasi-identifier values, and an estimated 100 - people in the entire population with these values, then δ is - 0.15. - """ - - quasi_ids_values = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Value', - ) - estimated_probability = proto.Field( - proto.DOUBLE, - number=2, - ) - - class DeltaPresenceEstimationHistogramBucket(proto.Message): - r"""A DeltaPresenceEstimationHistogramBucket message with the following - values: min_probability: 0.1 max_probability: 0.2 frequency: 42 - means that there are 42 records for which δ is in [0.1, 0.2). An - important particular case is when min_probability = max_probability - = 1: then, every individual who shares this quasi-identifier - combination is in the dataset. - - Attributes: - min_probability (float): - Between 0 and 1. - max_probability (float): - Always greater than or equal to min_probability. - bucket_size (int): - Number of records within these probability - bounds. - bucket_values (Sequence[google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues]): - Sample of quasi-identifier tuple values in - this bucket. The total number of classes - returned per bucket is capped at 20. - bucket_value_count (int): - Total number of distinct quasi-identifier - tuple values in this bucket. 
- """ - - min_probability = proto.Field( - proto.DOUBLE, - number=1, - ) - max_probability = proto.Field( - proto.DOUBLE, - number=2, - ) - bucket_size = proto.Field( - proto.INT64, - number=5, - ) - bucket_values = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationQuasiIdValues', - ) - bucket_value_count = proto.Field( - proto.INT64, - number=7, - ) - - delta_presence_estimation_histogram = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='AnalyzeDataSourceRiskDetails.DeltaPresenceEstimationResult.DeltaPresenceEstimationHistogramBucket', - ) - - class RequestedRiskAnalysisOptions(proto.Message): - r"""Risk analysis options. - Attributes: - job_config (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - The job config for the risk job. - """ - - job_config = proto.Field( - proto.MESSAGE, - number=1, - message='RiskAnalysisJobConfig', - ) - - requested_privacy_metric = proto.Field( - proto.MESSAGE, - number=1, - message='PrivacyMetric', - ) - requested_source_table = proto.Field( - proto.MESSAGE, - number=2, - message=storage.BigQueryTable, - ) - numerical_stats_result = proto.Field( - proto.MESSAGE, - number=3, - oneof='result', - message=NumericalStatsResult, - ) - categorical_stats_result = proto.Field( - proto.MESSAGE, - number=4, - oneof='result', - message=CategoricalStatsResult, - ) - k_anonymity_result = proto.Field( - proto.MESSAGE, - number=5, - oneof='result', - message=KAnonymityResult, - ) - l_diversity_result = proto.Field( - proto.MESSAGE, - number=6, - oneof='result', - message=LDiversityResult, - ) - k_map_estimation_result = proto.Field( - proto.MESSAGE, - number=7, - oneof='result', - message=KMapEstimationResult, - ) - delta_presence_estimation_result = proto.Field( - proto.MESSAGE, - number=9, - oneof='result', - message=DeltaPresenceEstimationResult, - ) - requested_options = proto.Field( - proto.MESSAGE, - number=10, - message=RequestedRiskAnalysisOptions, - ) - - -class ValueFrequency(proto.Message): - r"""A value of a field, including its frequency. - Attributes: - value (google.cloud.dlp_v2.types.Value): - A value contained in the field in question. - count (int): - How many times the value is contained in the - field. - """ - - value = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - count = proto.Field( - proto.INT64, - number=2, - ) - - -class Value(proto.Message): - r"""Set of primitive values supported by the system. Note that for the - purposes of inspection or transformation, the number of bytes - considered to comprise a 'Value' is based on its representation as a - UTF-8 encoded string. For example, if 'integer_value' is set to - 123456789, the number of bytes would be counted as 9, even though an - int64 only holds up to 8 bytes of data. 
- - Attributes: - integer_value (int): - integer - float_value (float): - float - string_value (str): - string - boolean_value (bool): - boolean - timestamp_value (google.protobuf.timestamp_pb2.Timestamp): - timestamp - time_value (google.type.timeofday_pb2.TimeOfDay): - time of day - date_value (google.type.date_pb2.Date): - date - day_of_week_value (google.type.dayofweek_pb2.DayOfWeek): - day of week - """ - - integer_value = proto.Field( - proto.INT64, - number=1, - oneof='type', - ) - float_value = proto.Field( - proto.DOUBLE, - number=2, - oneof='type', - ) - string_value = proto.Field( - proto.STRING, - number=3, - oneof='type', - ) - boolean_value = proto.Field( - proto.BOOL, - number=4, - oneof='type', - ) - timestamp_value = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=timestamp_pb2.Timestamp, - ) - time_value = proto.Field( - proto.MESSAGE, - number=6, - oneof='type', - message=timeofday_pb2.TimeOfDay, - ) - date_value = proto.Field( - proto.MESSAGE, - number=7, - oneof='type', - message=date_pb2.Date, - ) - day_of_week_value = proto.Field( - proto.ENUM, - number=8, - oneof='type', - enum=dayofweek_pb2.DayOfWeek, - ) - - -class QuoteInfo(proto.Message): - r"""Message for infoType-dependent details parsed from quote. - Attributes: - date_time (google.cloud.dlp_v2.types.DateTime): - The date time indicated by the quote. - """ - - date_time = proto.Field( - proto.MESSAGE, - number=2, - oneof='parsed_quote', - message='DateTime', - ) - - -class DateTime(proto.Message): - r"""Message for a date time object. - e.g. 2018-01-01, 5th August. - - Attributes: - date (google.type.date_pb2.Date): - One or more of the following must be set. - Must be a valid date or time value. - day_of_week (google.type.dayofweek_pb2.DayOfWeek): - Day of week - time (google.type.timeofday_pb2.TimeOfDay): - Time of day - time_zone (google.cloud.dlp_v2.types.DateTime.TimeZone): - Time zone - """ - - class TimeZone(proto.Message): - r"""Time zone of the date time object. - Attributes: - offset_minutes (int): - Set only if the offset can be determined. - Positive for time ahead of UTC. E.g. For - "UTC-9", this value is -540. - """ - - offset_minutes = proto.Field( - proto.INT32, - number=1, - ) - - date = proto.Field( - proto.MESSAGE, - number=1, - message=date_pb2.Date, - ) - day_of_week = proto.Field( - proto.ENUM, - number=2, - enum=dayofweek_pb2.DayOfWeek, - ) - time = proto.Field( - proto.MESSAGE, - number=3, - message=timeofday_pb2.TimeOfDay, - ) - time_zone = proto.Field( - proto.MESSAGE, - number=4, - message=TimeZone, - ) - - -class DeidentifyConfig(proto.Message): - r"""The configuration that controls how the data will change. - Attributes: - info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): - Treat the dataset as free-form text and apply - the same free text transformation everywhere. - record_transformations (google.cloud.dlp_v2.types.RecordTransformations): - Treat the dataset as structured. - Transformations can be applied to specific - locations within structured datasets, such as - transforming a column within a table. - transformation_error_handling (google.cloud.dlp_v2.types.TransformationErrorHandling): - Mode for handling transformation errors. If left - unspecified, the default mode is - ``TransformationErrorHandling.ThrowError``. 
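A minimal ``deidentify_content`` sketch wiring up ``DeidentifyConfig``; ``InfoTypeTransformations`` is defined further down in this module, and the parent project and sample text are hypothetical::

    from google.cloud import dlp_v2

    config = dlp_v2.DeidentifyConfig(
        info_type_transformations=dlp_v2.InfoTypeTransformations(
            transformations=[
                dlp_v2.InfoTypeTransformations.InfoTypeTransformation(
                    primitive_transformation=dlp_v2.PrimitiveTransformation(
                        replace_with_info_type_config=dlp_v2.ReplaceWithInfoTypeConfig(),
                    ),
                )
            ],
        ),
    )
    client = dlp_v2.DlpServiceClient()
    response = client.deidentify_content(
        request={
            "parent": "projects/example-project",
            "deidentify_config": config,
            "inspect_config": {"info_types": [{"name": "EMAIL_ADDRESS"}]},
            "item": {"value": "Contact me at jane.doe@example.com"},
        }
    )
    print(response.item.value)  # e.g. "Contact me at [EMAIL_ADDRESS]"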
- """ - - info_type_transformations = proto.Field( - proto.MESSAGE, - number=1, - oneof='transformation', - message='InfoTypeTransformations', - ) - record_transformations = proto.Field( - proto.MESSAGE, - number=2, - oneof='transformation', - message='RecordTransformations', - ) - transformation_error_handling = proto.Field( - proto.MESSAGE, - number=3, - message='TransformationErrorHandling', - ) - - -class TransformationErrorHandling(proto.Message): - r"""How to handle transformation errors during de-identification. A - transformation error occurs when the requested transformation is - incompatible with the data. For example, trying to de-identify an IP - address using a ``DateShift`` transformation would result in a - transformation error, since date info cannot be extracted from an IP - address. Information about any incompatible transformations, and how - they were handled, is returned in the response as part of the - ``TransformationOverviews``. - - Attributes: - throw_error (google.cloud.dlp_v2.types.TransformationErrorHandling.ThrowError): - Throw an error - leave_untransformed (google.cloud.dlp_v2.types.TransformationErrorHandling.LeaveUntransformed): - Ignore errors - """ - - class ThrowError(proto.Message): - r"""Throw an error and fail the request when a transformation - error occurs. - """ - - class LeaveUntransformed(proto.Message): - r"""Skips the data without modifying it if the requested transformation - would cause an error. For example, if a ``DateShift`` transformation - were applied an an IP address, this mode would leave the IP address - unchanged in the response. - """ - - throw_error = proto.Field( - proto.MESSAGE, - number=1, - oneof='mode', - message=ThrowError, - ) - leave_untransformed = proto.Field( - proto.MESSAGE, - number=2, - oneof='mode', - message=LeaveUntransformed, - ) - - -class PrimitiveTransformation(proto.Message): - r"""A rule for transforming a value. 
- Attributes: - replace_config (google.cloud.dlp_v2.types.ReplaceValueConfig): - Replace - redact_config (google.cloud.dlp_v2.types.RedactConfig): - Redact - character_mask_config (google.cloud.dlp_v2.types.CharacterMaskConfig): - Mask - crypto_replace_ffx_fpe_config (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig): - Ffx-Fpe - fixed_size_bucketing_config (google.cloud.dlp_v2.types.FixedSizeBucketingConfig): - Fixed size bucketing - bucketing_config (google.cloud.dlp_v2.types.BucketingConfig): - Bucketing - replace_with_info_type_config (google.cloud.dlp_v2.types.ReplaceWithInfoTypeConfig): - Replace with infotype - time_part_config (google.cloud.dlp_v2.types.TimePartConfig): - Time extraction - crypto_hash_config (google.cloud.dlp_v2.types.CryptoHashConfig): - Crypto - date_shift_config (google.cloud.dlp_v2.types.DateShiftConfig): - Date Shift - crypto_deterministic_config (google.cloud.dlp_v2.types.CryptoDeterministicConfig): - Deterministic Crypto - """ - - replace_config = proto.Field( - proto.MESSAGE, - number=1, - oneof='transformation', - message='ReplaceValueConfig', - ) - redact_config = proto.Field( - proto.MESSAGE, - number=2, - oneof='transformation', - message='RedactConfig', - ) - character_mask_config = proto.Field( - proto.MESSAGE, - number=3, - oneof='transformation', - message='CharacterMaskConfig', - ) - crypto_replace_ffx_fpe_config = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='CryptoReplaceFfxFpeConfig', - ) - fixed_size_bucketing_config = proto.Field( - proto.MESSAGE, - number=5, - oneof='transformation', - message='FixedSizeBucketingConfig', - ) - bucketing_config = proto.Field( - proto.MESSAGE, - number=6, - oneof='transformation', - message='BucketingConfig', - ) - replace_with_info_type_config = proto.Field( - proto.MESSAGE, - number=7, - oneof='transformation', - message='ReplaceWithInfoTypeConfig', - ) - time_part_config = proto.Field( - proto.MESSAGE, - number=8, - oneof='transformation', - message='TimePartConfig', - ) - crypto_hash_config = proto.Field( - proto.MESSAGE, - number=9, - oneof='transformation', - message='CryptoHashConfig', - ) - date_shift_config = proto.Field( - proto.MESSAGE, - number=11, - oneof='transformation', - message='DateShiftConfig', - ) - crypto_deterministic_config = proto.Field( - proto.MESSAGE, - number=12, - oneof='transformation', - message='CryptoDeterministicConfig', - ) - - -class TimePartConfig(proto.Message): - r"""For use with ``Date``, ``Timestamp``, and ``TimeOfDay``, extract or - preserve a portion of the value. - - Attributes: - part_to_extract (google.cloud.dlp_v2.types.TimePartConfig.TimePart): - The part of the time to keep. - """ - class TimePart(proto.Enum): - r"""Components that make up time.""" - TIME_PART_UNSPECIFIED = 0 - YEAR = 1 - MONTH = 2 - DAY_OF_MONTH = 3 - DAY_OF_WEEK = 4 - WEEK_OF_YEAR = 5 - HOUR_OF_DAY = 6 - - part_to_extract = proto.Field( - proto.ENUM, - number=1, - enum=TimePart, - ) - - -class CryptoHashConfig(proto.Message): - r"""Pseudonymization method that generates surrogates via - cryptographic hashing. Uses SHA-256. - The key size must be either 32 or 64 bytes. - Outputs a base64 encoded representation of the hashed output - (for example, L7k0BHmF1ha5U3NfGykjro4xWi1MPVQPjhMAZbSV9mM=). - Currently, only string and integer values can be hashed. See - https://cloud.google.com/dlp/docs/pseudonymization to learn - more. - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - The key used by the hash function. 
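A sketch of two of the primitive transformations above, year extraction and cryptographic hashing; the transient key name is hypothetical::

    from google.cloud import dlp_v2

    # Keep only the year from Date/Timestamp/TimeOfDay values.
    year_only = dlp_v2.PrimitiveTransformation(
        time_part_config=dlp_v2.TimePartConfig(
            part_to_extract=dlp_v2.TimePartConfig.TimePart.YEAR,
        ),
    )
    # SHA-256-based surrogate generation with a service-generated key.
    hashed = dlp_v2.PrimitiveTransformation(
        crypto_hash_config=dlp_v2.CryptoHashConfig(
            crypto_key=dlp_v2.CryptoKey(
                transient=dlp_v2.TransientCryptoKey(name="example-transient-key"),
            ),
        ),
    )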
- """ - - crypto_key = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - - -class CryptoDeterministicConfig(proto.Message): - r"""Pseudonymization method that generates deterministic - encryption for the given input. Outputs a base64 encoded - representation of the encrypted output. Uses AES-SIV based on - the RFC https://tools.ietf.org/html/rfc5297. - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - The key used by the encryption function. - surrogate_info_type (google.cloud.dlp_v2.types.InfoType): - The custom info type to annotate the surrogate with. This - annotation will be applied to the surrogate by prefixing it - with the name of the custom info type followed by the number - of characters comprising the surrogate. The following scheme - defines the format: {info type name}({surrogate character - count}):{surrogate} - - For example, if the name of custom info type is - 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full - replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' - - This annotation identifies the surrogate when inspecting - content using the custom info type 'Surrogate'. This - facilitates reversal of the surrogate when it occurs in free - text. - - Note: For record transformations where the entire cell in a - table is being transformed, surrogates are not mandatory. - Surrogates are used to denote the location of the token and - are necessary for re-identification in free form text. - - In order for inspection to work properly, the name of this - info type must not occur naturally anywhere in your data; - otherwise, inspection may either - - - reverse a surrogate that does not correspond to an actual - identifier - - be unable to parse the surrogate and result in an error - - Therefore, choose your custom info type name carefully after - considering what your data looks like. One way to select a - name that has a high chance of yielding reliable detection - is to include one or more unicode characters that are highly - improbable to exist in your data. For example, assuming your - data is entered from a regular ASCII keyboard, the symbol - with the hex code point 29DD might be used like so: - ⧝MY_TOKEN_TYPE. - context (google.cloud.dlp_v2.types.FieldId): - A context may be used for higher security and maintaining - referential integrity such that the same identifier in two - different contexts will be given a distinct surrogate. The - context is appended to plaintext value being encrypted. On - decryption the provided context is validated against the - value used during encryption. If a context was provided - during encryption, same context must be provided during - decryption as well. - - If the context is not set, plaintext would be used as is for - encryption. If the context is set but: - - 1. there is no record present when transforming a given - value or - 2. the field is not present when transforming a given value, - - plaintext would be used as is for encryption. - - Note that case (1) is expected when an - ``InfoTypeTransformation`` is applied to both structured and - non-structured ``ContentItem``\ s. - """ - - crypto_key = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - surrogate_info_type = proto.Field( - proto.MESSAGE, - number=2, - message=storage.InfoType, - ) - context = proto.Field( - proto.MESSAGE, - number=3, - message=storage.FieldId, - ) - - -class ReplaceValueConfig(proto.Message): - r"""Replace each input value with a given ``Value``. 
- Attributes: - new_value (google.cloud.dlp_v2.types.Value): - Value to replace it with. - """ - - new_value = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - - -class ReplaceWithInfoTypeConfig(proto.Message): - r"""Replace each matching finding with the name of the info_type. """ - - -class RedactConfig(proto.Message): - r"""Redact a given value. For example, if used with an - ``InfoTypeTransformation`` transforming PHONE_NUMBER, and input 'My - phone number is 206-555-0123', the output would be 'My phone number - is '. - """ - - -class CharsToIgnore(proto.Message): - r"""Characters to skip when doing deidentification of a value. - These will be left alone and skipped. - - Attributes: - characters_to_skip (str): - Characters to not transform when masking. - common_characters_to_ignore (google.cloud.dlp_v2.types.CharsToIgnore.CommonCharsToIgnore): - Common characters to not transform when - masking. Useful to avoid removing punctuation. - """ - class CommonCharsToIgnore(proto.Enum): - r"""Convenience enum for indicating common characters to not - transform. - """ - COMMON_CHARS_TO_IGNORE_UNSPECIFIED = 0 - NUMERIC = 1 - ALPHA_UPPER_CASE = 2 - ALPHA_LOWER_CASE = 3 - PUNCTUATION = 4 - WHITESPACE = 5 - - characters_to_skip = proto.Field( - proto.STRING, - number=1, - oneof='characters', - ) - common_characters_to_ignore = proto.Field( - proto.ENUM, - number=2, - oneof='characters', - enum=CommonCharsToIgnore, - ) - - -class CharacterMaskConfig(proto.Message): - r"""Partially mask a string by replacing a given number of characters - with a fixed character. Masking can start from the beginning or end - of the string. This can be used on data of any type (numbers, longs, - and so on) and when de-identifying structured data we'll attempt to - preserve the original data's type. (This allows you to take a long - like 123 and modify it to a string like \**3.) - - Attributes: - masking_character (str): - Character to use to mask the sensitive values—for example, - ``*`` for an alphabetic string such as a name, or ``0`` for - a numeric string such as ZIP code or credit card number. - This string must have a length of 1. If not supplied, this - value defaults to ``*`` for strings, and ``0`` for digits. - number_to_mask (int): - Number of characters to mask. If not set, all - matching chars will be masked. Skipped - characters do not count towards this tally. - reverse_order (bool): - Mask characters in reverse order. For example, if - ``masking_character`` is ``0``, ``number_to_mask`` is - ``14``, and ``reverse_order`` is ``false``, then the input - string ``1234-5678-9012-3456`` is masked as - ``00000000000000-3456``. If ``masking_character`` is ``*``, - ``number_to_mask`` is ``3``, and ``reverse_order`` is - ``true``, then the string ``12345`` is masked as ``12***``. - characters_to_ignore (Sequence[google.cloud.dlp_v2.types.CharsToIgnore]): - When masking a string, items in this list will be skipped - when replacing characters. For example, if the input string - is ``555-555-5555`` and you instruct Cloud DLP to skip ``-`` - and mask 5 characters with ``*``, Cloud DLP returns - ``***-**5-5555``.
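# A sketch of the masking example described just above (values are the ones
# from the docstring, not special in any way):
from google.cloud import dlp_v2

mask = dlp_v2.CharacterMaskConfig(
    masking_character="*",
    number_to_mask=5,
    characters_to_ignore=[
        dlp_v2.CharsToIgnore(characters_to_skip="-"),
    ],
)
# Applied to ``555-555-5555`` this yields ``***-**5-5555``: the first five
# maskable characters are replaced, and ``-`` is skipped without counting
# toward the tally.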
- """ - - masking_character = proto.Field( - proto.STRING, - number=1, - ) - number_to_mask = proto.Field( - proto.INT32, - number=2, - ) - reverse_order = proto.Field( - proto.BOOL, - number=3, - ) - characters_to_ignore = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='CharsToIgnore', - ) - - -class FixedSizeBucketingConfig(proto.Message): - r"""Buckets values based on fixed size ranges. The Bucketing - transformation can provide all of this functionality, but requires - more configuration. This message is provided as a convenience to the - user for simple bucketing strategies. - - The transformed value will be a hyphenated string of - {lower_bound}-{upper_bound}, i.e if lower_bound = 10 and upper_bound - = 20 all values that are within this bucket will be replaced with - "10-20". - - This can be used on data of type: double, long. - - If the bound Value type differs from the type of data being - transformed, we will first attempt converting the type of the data - to be transformed to match the type of the bound before comparing. - - See https://cloud.google.com/dlp/docs/concepts-bucketing to learn - more. - - Attributes: - lower_bound (google.cloud.dlp_v2.types.Value): - Required. Lower bound value of buckets. All values less than - ``lower_bound`` are grouped together into a single bucket; - for example if ``lower_bound`` = 10, then all values less - than 10 are replaced with the value "-10". - upper_bound (google.cloud.dlp_v2.types.Value): - Required. Upper bound value of buckets. All values greater - than upper_bound are grouped together into a single bucket; - for example if ``upper_bound`` = 89, then all values greater - than 89 are replaced with the value "89+". - bucket_size (float): - Required. Size of each bucket (except for minimum and - maximum buckets). So if ``lower_bound`` = 10, - ``upper_bound`` = 89, and ``bucket_size`` = 10, then the - following buckets would be used: -10, 10-20, 20-30, 30-40, - 40-50, 50-60, 60-70, 70-80, 80-89, 89+. Precision up to 2 - decimals works. - """ - - lower_bound = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - upper_bound = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - bucket_size = proto.Field( - proto.DOUBLE, - number=3, - ) - - -class BucketingConfig(proto.Message): - r"""Generalization function that buckets values based on ranges. The - ranges and replacement values are dynamically provided by the user - for custom behavior, such as 1-30 -> LOW 31-65 -> MEDIUM 66-100 -> - HIGH This can be used on data of type: number, long, string, - timestamp. If the bound ``Value`` type differs from the type of data - being transformed, we will first attempt converting the type of the - data to be transformed to match the type of the bound before - comparing. See https://cloud.google.com/dlp/docs/concepts-bucketing - to learn more. - - Attributes: - buckets (Sequence[google.cloud.dlp_v2.types.BucketingConfig.Bucket]): - Set of buckets. Ranges must be non- - verlapping. - """ - - class Bucket(proto.Message): - r"""Bucket is represented as a range, along with replacement - values. - - Attributes: - min_ (google.cloud.dlp_v2.types.Value): - Lower bound of the range, inclusive. Type - should be the same as max if used. - max_ (google.cloud.dlp_v2.types.Value): - Upper bound of the range, exclusive; type - must match min. - replacement_value (google.cloud.dlp_v2.types.Value): - Required. Replacement value for this bucket. 
- """ - - min_ = proto.Field( - proto.MESSAGE, - number=1, - message='Value', - ) - max_ = proto.Field( - proto.MESSAGE, - number=2, - message='Value', - ) - replacement_value = proto.Field( - proto.MESSAGE, - number=3, - message='Value', - ) - - buckets = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=Bucket, - ) - - -class CryptoReplaceFfxFpeConfig(proto.Message): - r"""Replaces an identifier with a surrogate using Format Preserving - Encryption (FPE) with the FFX mode of operation; however when used - in the ``ReidentifyContent`` API method, it serves the opposite - function by reversing the surrogate back into the original - identifier. The identifier must be encoded as ASCII. For a given - crypto key and context, the same identifier will be replaced with - the same surrogate. Identifiers must be at least two characters - long. In the case that the identifier is the empty string, it will - be skipped. See https://cloud.google.com/dlp/docs/pseudonymization - to learn more. - - Note: We recommend using CryptoDeterministicConfig for all use cases - which do not require preserving the input alphabet space and size, - plus warrant referential integrity. - - Attributes: - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - Required. The key used by the encryption - algorithm. - context (google.cloud.dlp_v2.types.FieldId): - The 'tweak', a context may be used for higher security since - the same identifier in two different contexts won't be given - the same surrogate. If the context is not set, a default - tweak will be used. - - If the context is set but: - - 1. there is no record present when transforming a given - value or - 2. the field is not present when transforming a given value, - - a default tweak will be used. - - Note that case (1) is expected when an - ``InfoTypeTransformation`` is applied to both structured and - non-structured ``ContentItem``\ s. Currently, the referenced - field may be of value type integer or string. - - The tweak is constructed as a sequence of bytes in big - endian byte order such that: - - - a 64 bit integer is encoded followed by a single byte of - value 1 - - a string is encoded in UTF-8 format followed by a single - byte of value 2 - common_alphabet (google.cloud.dlp_v2.types.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet): - Common alphabets. - custom_alphabet (str): - This is supported by mapping these to the alphanumeric - characters that the FFX mode natively supports. This happens - before/after encryption/decryption. Each character listed - must appear only once. Number of characters must be in the - range [2, 95]. This must be encoded as ASCII. The order of - characters does not matter. The full list of allowed - characters is: - 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz - ~`!@#$%^&*()_-+={[}]|:;"'<,>.?/ - radix (int): - The native way to select the alphabet. Must be in the range - [2, 95]. - surrogate_info_type (google.cloud.dlp_v2.types.InfoType): - The custom infoType to annotate the surrogate with. This - annotation will be applied to the surrogate by prefixing it - with the name of the custom infoType followed by the number - of characters comprising the surrogate. 
The following scheme - defines the format: - info_type_name(surrogate_character_count):surrogate - - For example, if the name of custom infoType is - 'MY_TOKEN_INFO_TYPE' and the surrogate is 'abc', the full - replacement value will be: 'MY_TOKEN_INFO_TYPE(3):abc' - - This annotation identifies the surrogate when inspecting - content using the custom infoType - ```SurrogateType`` `__. - This facilitates reversal of the surrogate when it occurs in - free text. - - In order for inspection to work properly, the name of this - infoType must not occur naturally anywhere in your data; - otherwise, inspection may find a surrogate that does not - correspond to an actual identifier. Therefore, choose your - custom infoType name carefully after considering what your - data looks like. One way to select a name that has a high - chance of yielding reliable detection is to include one or - more unicode characters that are highly improbable to exist - in your data. For example, assuming your data is entered - from a regular ASCII keyboard, the symbol with the hex code - point 29DD might be used like so: ⧝MY_TOKEN_TYPE - """ - class FfxCommonNativeAlphabet(proto.Enum): - r"""These are commonly used subsets of the alphabet that the FFX - mode natively supports. In the algorithm, the alphabet is - selected using the "radix". Therefore each corresponds to - particular radix. - """ - FFX_COMMON_NATIVE_ALPHABET_UNSPECIFIED = 0 - NUMERIC = 1 - HEXADECIMAL = 2 - UPPER_CASE_ALPHA_NUMERIC = 3 - ALPHA_NUMERIC = 4 - - crypto_key = proto.Field( - proto.MESSAGE, - number=1, - message='CryptoKey', - ) - context = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - common_alphabet = proto.Field( - proto.ENUM, - number=4, - oneof='alphabet', - enum=FfxCommonNativeAlphabet, - ) - custom_alphabet = proto.Field( - proto.STRING, - number=5, - oneof='alphabet', - ) - radix = proto.Field( - proto.INT32, - number=6, - oneof='alphabet', - ) - surrogate_info_type = proto.Field( - proto.MESSAGE, - number=8, - message=storage.InfoType, - ) - - -class CryptoKey(proto.Message): - r"""This is a data encryption key (DEK) (as opposed to - a key encryption key (KEK) stored by KMS). - When using KMS to wrap/unwrap DEKs, be sure to set an - appropriate IAM policy on the KMS CryptoKey (KEK) to ensure an - attacker cannot unwrap the data crypto key. - - Attributes: - transient (google.cloud.dlp_v2.types.TransientCryptoKey): - Transient crypto key - unwrapped (google.cloud.dlp_v2.types.UnwrappedCryptoKey): - Unwrapped crypto key - kms_wrapped (google.cloud.dlp_v2.types.KmsWrappedCryptoKey): - Kms wrapped key - """ - - transient = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='TransientCryptoKey', - ) - unwrapped = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message='UnwrappedCryptoKey', - ) - kms_wrapped = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='KmsWrappedCryptoKey', - ) - - -class TransientCryptoKey(proto.Message): - r"""Use this to have a random data crypto key generated. - It will be discarded after the request finishes. - - Attributes: - name (str): - Required. Name of the key. This is an arbitrary string used - to differentiate different keys. A unique key is generated - per name: two separate ``TransientCryptoKey`` protos share - the same generated key if their names are the same. When the - data crypto key is generated, this name is not used in any - way (repeating the api call will result in a different key - being generated). 
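# A sketch combining CryptoReplaceFfxFpeConfig with the transient key
# described above; "demo-key" and the surrogate infoType name are made-up
# placeholders:
from google.cloud import dlp_v2

fpe = dlp_v2.CryptoReplaceFfxFpeConfig(
    crypto_key=dlp_v2.CryptoKey(
        transient=dlp_v2.TransientCryptoKey(name="demo-key"),
    ),
    common_alphabet=dlp_v2.CryptoReplaceFfxFpeConfig.FfxCommonNativeAlphabet.NUMERIC,
    surrogate_info_type=dlp_v2.InfoType(name="MY_TOKEN_INFO_TYPE"),
)
# ``common_alphabet``, ``custom_alphabet`` and ``radix`` form the
# ``alphabet`` oneof; only one of the three may be set at a time.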
- """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class UnwrappedCryptoKey(proto.Message): - r"""Using raw keys is prone to security risks due to accidentally - leaking the key. Choose another type of key if possible. - - Attributes: - key (bytes): - Required. A 128/192/256 bit key. - """ - - key = proto.Field( - proto.BYTES, - number=1, - ) - - -class KmsWrappedCryptoKey(proto.Message): - r"""Include to use an existing data crypto key wrapped by KMS. - The wrapped key must be a 128/192/256 bit key. - Authorization requires the following IAM permissions when - sending a request to perform a crypto transformation using a - kms-wrapped crypto key: dlp.kms.encrypt - - Attributes: - wrapped_key (bytes): - Required. The wrapped data crypto key. - crypto_key_name (str): - Required. The resource name of the KMS - CryptoKey to use for unwrapping. - """ - - wrapped_key = proto.Field( - proto.BYTES, - number=1, - ) - crypto_key_name = proto.Field( - proto.STRING, - number=2, - ) - - -class DateShiftConfig(proto.Message): - r"""Shifts dates by random number of days, with option to be - consistent for the same context. See - https://cloud.google.com/dlp/docs/concepts-date-shifting to - learn more. - - Attributes: - upper_bound_days (int): - Required. Range of shift in days. Actual - shift will be selected at random within this - range (inclusive ends). Negative means shift to - earlier in time. Must not be more than 365250 - days (1000 years) each direction. - For example, 3 means shift date to at most 3 - days into the future. - lower_bound_days (int): - Required. For example, -5 means shift date to - at most 5 days back in the past. - context (google.cloud.dlp_v2.types.FieldId): - Points to the field that contains the - context, for example, an entity id. If set, must - also set cryptoKey. If set, shift will be - consistent for the given context. - crypto_key (google.cloud.dlp_v2.types.CryptoKey): - Causes the shift to be computed based on this key and the - context. This results in the same shift for the same context - and crypto_key. If set, must also set context. Can only be - applied to table items. - """ - - upper_bound_days = proto.Field( - proto.INT32, - number=1, - ) - lower_bound_days = proto.Field( - proto.INT32, - number=2, - ) - context = proto.Field( - proto.MESSAGE, - number=3, - message=storage.FieldId, - ) - crypto_key = proto.Field( - proto.MESSAGE, - number=4, - oneof='method', - message='CryptoKey', - ) - - -class InfoTypeTransformations(proto.Message): - r"""A type of transformation that will scan unstructured text and apply - various ``PrimitiveTransformation``\ s to each finding, where the - transformation is applied to only values that were identified as a - specific info_type. - - Attributes: - transformations (Sequence[google.cloud.dlp_v2.types.InfoTypeTransformations.InfoTypeTransformation]): - Required. Transformation for each infoType. - Cannot specify more than one for a given - infoType. - """ - - class InfoTypeTransformation(proto.Message): - r"""A transformation to apply to text that is identified as a specific - info_type. - - Attributes: - info_types (Sequence[google.cloud.dlp_v2.types.InfoType]): - InfoTypes to apply the transformation to. An empty list will - cause this transformation to apply to all findings that - correspond to infoTypes that were requested in - ``InspectConfig``. - primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - Required. Primitive transformation to apply - to the infoType. 
- """ - - info_types = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - primitive_transformation = proto.Field( - proto.MESSAGE, - number=2, - message='PrimitiveTransformation', - ) - - transformations = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=InfoTypeTransformation, - ) - - -class FieldTransformation(proto.Message): - r"""The transformation to apply to the field. - Attributes: - fields (Sequence[google.cloud.dlp_v2.types.FieldId]): - Required. Input field(s) to apply the - transformation to. - condition (google.cloud.dlp_v2.types.RecordCondition): - Only apply the transformation if the condition evaluates to - true for the given ``RecordCondition``. The conditions are - allowed to reference fields that are not used in the actual - transformation. - - Example Use Cases: - - - Apply a different bucket transformation to an age column - if the zip code column for the same record is within a - specific range. - - Redact a field if the date of birth field is greater than - 85. - primitive_transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - Apply the transformation to the entire field. - info_type_transformations (google.cloud.dlp_v2.types.InfoTypeTransformations): - Treat the contents of the field as free text, and - selectively transform content that matches an ``InfoType``. - """ - - fields = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - condition = proto.Field( - proto.MESSAGE, - number=3, - message='RecordCondition', - ) - primitive_transformation = proto.Field( - proto.MESSAGE, - number=4, - oneof='transformation', - message='PrimitiveTransformation', - ) - info_type_transformations = proto.Field( - proto.MESSAGE, - number=5, - oneof='transformation', - message='InfoTypeTransformations', - ) - - -class RecordTransformations(proto.Message): - r"""A type of transformation that is applied over structured data - such as a table. - - Attributes: - field_transformations (Sequence[google.cloud.dlp_v2.types.FieldTransformation]): - Transform the record by applying various - field transformations. - record_suppressions (Sequence[google.cloud.dlp_v2.types.RecordSuppression]): - Configuration defining which records get - suppressed entirely. Records that match any - suppression rule are omitted from the output. - """ - - field_transformations = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FieldTransformation', - ) - record_suppressions = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='RecordSuppression', - ) - - -class RecordSuppression(proto.Message): - r"""Configuration to suppress records whose suppression - conditions evaluate to true. - - Attributes: - condition (google.cloud.dlp_v2.types.RecordCondition): - A condition that when it evaluates to true - will result in the record being evaluated to be - suppressed from the transformed content. - """ - - condition = proto.Field( - proto.MESSAGE, - number=1, - message='RecordCondition', - ) - - -class RecordCondition(proto.Message): - r"""A condition for determining whether a transformation should - be applied to a field. - - Attributes: - expressions (google.cloud.dlp_v2.types.RecordCondition.Expressions): - An expression. - """ - - class Condition(proto.Message): - r"""The field type of ``value`` and ``field`` do not need to match to be - considered equal, but not all comparisons are possible. 
EQUAL_TO and - NOT_EQUAL_TO attempt to compare even with incompatible types, but - all other comparisons are invalid with incompatible types. A - ``value`` of type: - - - ``string`` can be compared against all other types - - ``boolean`` can only be compared against other booleans - - ``integer`` can be compared against doubles or a string if the - string value can be parsed as an integer. - - ``double`` can be compared against integers or a string if the - string can be parsed as a double. - - ``Timestamp`` can be compared against strings in RFC 3339 date - string format. - - ``TimeOfDay`` can be compared against timestamps and strings in - the format of 'HH:mm:ss'. - - If we fail to compare due to type mismatch, a warning will be given - and the condition will evaluate to false. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Required. Field within the record this - condition is evaluated against. - operator (google.cloud.dlp_v2.types.RelationalOperator): - Required. Operator used to compare the field - or infoType to the value. - value (google.cloud.dlp_v2.types.Value): - Value to compare against. [Mandatory, except for ``EXISTS`` - tests.] - """ - - field = proto.Field( - proto.MESSAGE, - number=1, - message=storage.FieldId, - ) - operator = proto.Field( - proto.ENUM, - number=3, - enum='RelationalOperator', - ) - value = proto.Field( - proto.MESSAGE, - number=4, - message='Value', - ) - - class Conditions(proto.Message): - r"""A collection of conditions. - Attributes: - conditions (Sequence[google.cloud.dlp_v2.types.RecordCondition.Condition]): - A collection of conditions. - """ - - conditions = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='RecordCondition.Condition', - ) - - class Expressions(proto.Message): - r"""An expression, consisting of an operator and conditions. - Attributes: - logical_operator (google.cloud.dlp_v2.types.RecordCondition.Expressions.LogicalOperator): - The operator to apply to the result of conditions. Default - and currently only supported value is ``AND``. - conditions (google.cloud.dlp_v2.types.RecordCondition.Conditions): - Conditions to apply to the expression. - """ - class LogicalOperator(proto.Enum): - r"""Logical operators for conditional checks.""" - LOGICAL_OPERATOR_UNSPECIFIED = 0 - AND = 1 - - logical_operator = proto.Field( - proto.ENUM, - number=1, - enum='RecordCondition.Expressions.LogicalOperator', - ) - conditions = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='RecordCondition.Conditions', - ) - - expressions = proto.Field( - proto.MESSAGE, - number=3, - message=Expressions, - ) - - -class TransformationOverview(proto.Message): - r"""Overview of the modifications that occurred. - Attributes: - transformed_bytes (int): - Total size in bytes that were transformed in - some way. - transformation_summaries (Sequence[google.cloud.dlp_v2.types.TransformationSummary]): - Transformations applied to the dataset. - """ - - transformed_bytes = proto.Field( - proto.INT64, - number=2, - ) - transformation_summaries = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='TransformationSummary', - ) - - -class TransformationSummary(proto.Message): - r"""Summary of a single transformation. Only one of 'transformation', - 'field_transformation', or 'record_suppress' will be set. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - Set if the transformation was limited to a - specific InfoType.
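# Putting the RecordCondition pieces above together: a condition that
# evaluates to true when an ``age`` field is greater than 85. A sketch; the
# field name is illustrative, not from the source:
from google.cloud import dlp_v2

condition = dlp_v2.RecordCondition(
    expressions=dlp_v2.RecordCondition.Expressions(
        conditions=dlp_v2.RecordCondition.Conditions(
            conditions=[
                dlp_v2.RecordCondition.Condition(
                    field=dlp_v2.FieldId(name="age"),
                    operator=dlp_v2.RelationalOperator.GREATER_THAN,
                    value=dlp_v2.Value(integer_value=85),
                ),
            ],
        ),
    ),
)
# Such a condition can gate a FieldTransformation or, wrapped in a
# RecordSuppression, drop matching records from the output entirely.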
- field (google.cloud.dlp_v2.types.FieldId): - Set if the transformation was limited to a - specific FieldId. - transformation (google.cloud.dlp_v2.types.PrimitiveTransformation): - The specific transformation these stats apply - to. - field_transformations (Sequence[google.cloud.dlp_v2.types.FieldTransformation]): - The field transformation that was applied. - If multiple field transformations are requested - for a single field, this list will contain all - of them; otherwise, only one is supplied. - record_suppress (google.cloud.dlp_v2.types.RecordSuppression): - The specific suppression option these stats - apply to. - results (Sequence[google.cloud.dlp_v2.types.TransformationSummary.SummaryResult]): - Collection of all transformations that took - place or had an error. - transformed_bytes (int): - Total size in bytes that were transformed in - some way. - """ - class TransformationResultCode(proto.Enum): - r"""Possible outcomes of transformations.""" - TRANSFORMATION_RESULT_CODE_UNSPECIFIED = 0 - SUCCESS = 1 - ERROR = 2 - - class SummaryResult(proto.Message): - r"""A collection that informs the user the number of times a particular - ``TransformationResultCode`` and error details occurred. - - Attributes: - count (int): - Number of transformations counted by this - result. - code (google.cloud.dlp_v2.types.TransformationSummary.TransformationResultCode): - Outcome of the transformation. - details (str): - A place for warnings or errors to show up if - a transformation didn't work as expected. - """ - - count = proto.Field( - proto.INT64, - number=1, - ) - code = proto.Field( - proto.ENUM, - number=2, - enum='TransformationSummary.TransformationResultCode', - ) - details = proto.Field( - proto.STRING, - number=3, - ) - - info_type = proto.Field( - proto.MESSAGE, - number=1, - message=storage.InfoType, - ) - field = proto.Field( - proto.MESSAGE, - number=2, - message=storage.FieldId, - ) - transformation = proto.Field( - proto.MESSAGE, - number=3, - message='PrimitiveTransformation', - ) - field_transformations = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='FieldTransformation', - ) - record_suppress = proto.Field( - proto.MESSAGE, - number=6, - message='RecordSuppression', - ) - results = proto.RepeatedField( - proto.MESSAGE, - number=4, - message=SummaryResult, - ) - transformed_bytes = proto.Field( - proto.INT64, - number=7, - ) - - -class Schedule(proto.Message): - r"""Schedule for triggeredJobs. - Attributes: - recurrence_period_duration (google.protobuf.duration_pb2.Duration): - With this option a job is started on a regular - periodic basis. For example: every day (86400 - seconds). - A scheduled start time will be skipped if the - previous execution has not ended when its - scheduled time occurs. - This value must be set to a time duration - greater than or equal to 1 day and can be no - longer than 60 days. - """ - - recurrence_period_duration = proto.Field( - proto.MESSAGE, - number=1, - oneof='option', - message=duration_pb2.Duration, - ) - - -class Manual(proto.Message): - r"""Job trigger option for hybrid jobs. Jobs must be manually - created and finished. - """ - - -class InspectTemplate(proto.Message): - r"""The inspectTemplate contains a configuration (set of types of - sensitive data to be detected) to be used anywhere you otherwise - would normally specify InspectConfig. See - https://cloud.google.com/dlp/docs/concepts-templates to learn - more. - - Attributes: - name (str): - Output only. The template name.
- - The template will have one of the following formats: - ``projects/PROJECT_ID/inspectTemplates/TEMPLATE_ID`` OR - ``organizations/ORGANIZATION_ID/inspectTemplates/TEMPLATE_ID``; - display_name (str): - Display name (max 256 chars). - description (str): - Short description (max 256 chars). - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of an - inspectTemplate. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of an - inspectTemplate. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - The core content of the template. - Configuration of the scanning process. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - display_name = proto.Field( - proto.STRING, - number=2, - ) - description = proto.Field( - proto.STRING, - number=3, - ) - create_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - inspect_config = proto.Field( - proto.MESSAGE, - number=6, - message='InspectConfig', - ) - - -class DeidentifyTemplate(proto.Message): - r"""DeidentifyTemplates contains instructions on how to - de-identify content. See - https://cloud.google.com/dlp/docs/concepts-templates to learn more. - - Attributes: - name (str): - Output only. The template name. - - The template will have one of the following formats: - ``projects/PROJECT_ID/deidentifyTemplates/TEMPLATE_ID`` OR - ``organizations/ORGANIZATION_ID/deidentifyTemplates/TEMPLATE_ID`` - display_name (str): - Display name (max 256 chars). - description (str): - Short description (max 256 chars). - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of a - deidentifyTemplate. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of a - deidentifyTemplate. - deidentify_config (google.cloud.dlp_v2.types.DeidentifyConfig): - The core content of the template. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - display_name = proto.Field( - proto.STRING, - number=2, - ) - description = proto.Field( - proto.STRING, - number=3, - ) - create_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - deidentify_config = proto.Field( - proto.MESSAGE, - number=6, - message='DeidentifyConfig', - ) - - -class Error(proto.Message): - r"""Detailed information about an error encountered during job - execution or the results of an unsuccessful activation of the - JobTrigger. - - Attributes: - details (google.rpc.status_pb2.Status): - Detailed error codes and messages. - timestamps (Sequence[google.protobuf.timestamp_pb2.Timestamp]): - The times the error occurred. - """ - - details = proto.Field( - proto.MESSAGE, - number=1, - message=status_pb2.Status, - ) - timestamps = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class JobTrigger(proto.Message): - r"""Contains a configuration to make DLP API calls on a repeating - basis. See https://cloud.google.com/dlp/docs/concepts-job-triggers - to learn more. - - Attributes: - name (str): - Unique resource name for the triggeredJob, assigned by the - service when the triggeredJob is created, for example - ``projects/dlp-test-project/jobTriggers/53234423``.
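# Creating the InspectTemplate defined above through the generated client.
# A sketch; the project id and credentials are placeholders, and the call
# requires valid application default credentials:
from google.cloud import dlp_v2

client = dlp_v2.DlpServiceClient()
template = client.create_inspect_template(
    parent="projects/example-project/locations/global",
    inspect_template=dlp_v2.InspectTemplate(
        display_name="ssn-only",
        inspect_config=dlp_v2.InspectConfig(
            info_types=[dlp_v2.InfoType(name="US_SOCIAL_SECURITY_NUMBER")],
        ),
    ),
)
print(template.name)  # server-assigned resource name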
- display_name (str): - Display name (max 100 chars) - description (str): - User provided description (max 256 chars) - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - For inspect jobs, a snapshot of the - configuration. - triggers (Sequence[google.cloud.dlp_v2.types.JobTrigger.Trigger]): - A list of triggers which will be OR'ed - together. Only one in the list needs to trigger - for a job to be started. The list may contain - only a single Schedule trigger and must have at - least one object. - errors (Sequence[google.cloud.dlp_v2.types.Error]): - Output only. A stream of errors encountered - when the trigger was activated. Repeated errors - may result in the JobTrigger automatically being - paused. Will return the last 100 errors. - Whenever the JobTrigger is modified this list - will be cleared. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The creation timestamp of a - triggeredJob. - update_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The last update timestamp of a - triggeredJob. - last_run_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The timestamp of the last time - this trigger executed. - status (google.cloud.dlp_v2.types.JobTrigger.Status): - Required. A status for this trigger. - """ - class Status(proto.Enum): - r"""Whether the trigger is currently active. If PAUSED or - CANCELLED, no jobs will be created with this configuration. The - service may automatically pause triggers experiencing frequent - errors. To restart a job, set the status to HEALTHY after - correcting user errors. - """ - STATUS_UNSPECIFIED = 0 - HEALTHY = 1 - PAUSED = 2 - CANCELLED = 3 - - class Trigger(proto.Message): - r"""What event needs to occur for a new job to be started. - Attributes: - schedule (google.cloud.dlp_v2.types.Schedule): - Create a job on a repeating basis based on - the elapse of time. - manual (google.cloud.dlp_v2.types.Manual): - For use with hybrid jobs. Jobs must be - manually created and finished. Early access - feature is in a pre-release state and might - change or have limited support. For more - information, see - https://cloud.google.com/products#product-launch-stages. - """ - - schedule = proto.Field( - proto.MESSAGE, - number=1, - oneof='trigger', - message='Schedule', - ) - manual = proto.Field( - proto.MESSAGE, - number=2, - oneof='trigger', - message='Manual', - ) - - name = proto.Field( - proto.STRING, - number=1, - ) - display_name = proto.Field( - proto.STRING, - number=2, - ) - description = proto.Field( - proto.STRING, - number=3, - ) - inspect_job = proto.Field( - proto.MESSAGE, - number=4, - oneof='job', - message='InspectJobConfig', - ) - triggers = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=Trigger, - ) - errors = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='Error', - ) - create_time = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - update_time = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - last_run_time = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - status = proto.Field( - proto.ENUM, - number=10, - enum=Status, - ) - - -class Action(proto.Message): - r"""A task to execute on the completion of a job. - See https://cloud.google.com/dlp/docs/concepts-actions to learn - more. - - Attributes: - save_findings (google.cloud.dlp_v2.types.Action.SaveFindings): - Save resulting findings in a provided - location.
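# A sketch of a daily JobTrigger using the Schedule message from earlier
# (86400 seconds = 1 day); the inspect job configuration is elided:
from google.cloud import dlp_v2
from google.protobuf import duration_pb2

trigger = dlp_v2.JobTrigger(
    display_name="daily-scan",
    triggers=[
        dlp_v2.JobTrigger.Trigger(
            schedule=dlp_v2.Schedule(
                recurrence_period_duration=duration_pb2.Duration(seconds=86400),
            ),
        ),
    ],
    status=dlp_v2.JobTrigger.Status.HEALTHY,
)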
- pub_sub (google.cloud.dlp_v2.types.Action.PublishToPubSub): - Publish a notification to a Pub/Sub topic. - publish_summary_to_cscc (google.cloud.dlp_v2.types.Action.PublishSummaryToCscc): - Publish summary to Cloud Security Command - Center (Alpha). - publish_findings_to_cloud_data_catalog (google.cloud.dlp_v2.types.Action.PublishFindingsToCloudDataCatalog): - Publish findings to Cloud Data Catalog. - job_notification_emails (google.cloud.dlp_v2.types.Action.JobNotificationEmails): - Enable email notification for project owners - and editors on job's completion/failure. - publish_to_stackdriver (google.cloud.dlp_v2.types.Action.PublishToStackdriver): - Enable Stackdriver metric dlp.googleapis.com/finding_count. - """ - - class SaveFindings(proto.Message): - r"""If set, the detailed findings will be persisted to the - specified OutputStorageConfig. Only a single instance of this - action can be specified. - Compatible with: Inspect, Risk - - Attributes: - output_config (google.cloud.dlp_v2.types.OutputStorageConfig): - Location to store findings outside of DLP. - """ - - output_config = proto.Field( - proto.MESSAGE, - number=1, - message='OutputStorageConfig', - ) - - class PublishToPubSub(proto.Message): - r"""Publish a message into given Pub/Sub topic when DlpJob has - completed. The message contains a single field, ``DlpJobName``, - which is equal to the finished job's - ```DlpJob.name`` `__. - Compatible with: Inspect, Risk - - Attributes: - topic (str): - Cloud Pub/Sub topic to send notifications to. - The topic must have given publishing access - rights to the DLP API service account executing - the long running DlpJob sending the - notifications. Format is - projects/{project}/topics/{topic}. - """ - - topic = proto.Field( - proto.STRING, - number=1, - ) - - class PublishSummaryToCscc(proto.Message): - r"""Publish the result summary of a DlpJob to the Cloud Security - Command Center (CSCC Alpha). - This action is only available for projects which are part of an - organization and whitelisted for the alpha Cloud Security - Command Center. - The action will publish count of finding instances and their - info types. The summary of findings will be persisted in CSCC - and are governed by CSCC service-specific policy, see - https://cloud.google.com/terms/service-terms Only a single - instance of this action can be specified. Compatible with: - Inspect - """ - - class PublishFindingsToCloudDataCatalog(proto.Message): - r"""Publish findings of a DlpJob to Cloud Data Catalog. Labels - summarizing the results of the DlpJob will be applied to the - entry for the resource scanned in Cloud Data Catalog. Any labels - previously written by another DlpJob will be deleted. InfoType - naming patterns are strictly enforced when using this feature. - Note that the findings will be persisted in Cloud Data Catalog - storage and are governed by Data Catalog service-specific - policy, see https://cloud.google.com/terms/service-terms - Only a single instance of this action can be specified and only - allowed if all resources being scanned are BigQuery tables. - Compatible with: Inspect - """ - - class JobNotificationEmails(proto.Message): - r"""Enable email notification to project owners and editors on - job's completion/failure. - """ - - class PublishToStackdriver(proto.Message): - r"""Enable Stackdriver metric dlp.googleapis.com/finding_count. This - will publish a metric to Stackdriver for each infoType requested and - how many findings were found for it.
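# Two of the actions above as they might be attached to a job config.
# A sketch; the topic, project, dataset and table names are placeholders:
from google.cloud import dlp_v2

actions = [
    dlp_v2.Action(
        save_findings=dlp_v2.Action.SaveFindings(
            output_config=dlp_v2.OutputStorageConfig(
                table=dlp_v2.BigQueryTable(
                    project_id="example-project",
                    dataset_id="dlp_results",
                    table_id="findings",
                ),
            ),
        ),
    ),
    dlp_v2.Action(
        pub_sub=dlp_v2.Action.PublishToPubSub(
            topic="projects/example-project/topics/dlp-notifications",
        ),
    ),
]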
CustomDetectors will be - bucketed as 'Custom' under the Stackdriver label 'info_type'. - """ - - save_findings = proto.Field( - proto.MESSAGE, - number=1, - oneof='action', - message=SaveFindings, - ) - pub_sub = proto.Field( - proto.MESSAGE, - number=2, - oneof='action', - message=PublishToPubSub, - ) - publish_summary_to_cscc = proto.Field( - proto.MESSAGE, - number=3, - oneof='action', - message=PublishSummaryToCscc, - ) - publish_findings_to_cloud_data_catalog = proto.Field( - proto.MESSAGE, - number=5, - oneof='action', - message=PublishFindingsToCloudDataCatalog, - ) - job_notification_emails = proto.Field( - proto.MESSAGE, - number=8, - oneof='action', - message=JobNotificationEmails, - ) - publish_to_stackdriver = proto.Field( - proto.MESSAGE, - number=9, - oneof='action', - message=PublishToStackdriver, - ) - - -class CreateInspectTemplateRequest(proto.Message): - r"""Request message for CreateInspectTemplate. - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - Required. The InspectTemplate to create. - template_id (str): - The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - inspect_template = proto.Field( - proto.MESSAGE, - number=2, - message='InspectTemplate', - ) - template_id = proto.Field( - proto.STRING, - number=3, - ) - location_id = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateInspectTemplateRequest(proto.Message): - r"""Request message for UpdateInspectTemplate. - Attributes: - name (str): - Required. Resource name of organization and inspectTemplate - to be updated, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - inspect_template (google.cloud.dlp_v2.types.InspectTemplate): - New InspectTemplate value. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - inspect_template = proto.Field( - proto.MESSAGE, - number=2, - message='InspectTemplate', - ) - update_mask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetInspectTemplateRequest(proto.Message): - r"""Request message for GetInspectTemplate. - Attributes: - name (str): - Required. 
Resource name of the organization and - inspectTemplate to be read, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListInspectTemplatesRequest(proto.Message): - r"""Request message for ListInspectTemplates. - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ``ListInspectTemplates``. - page_size (int): - Size of the page, can be limited by server. - If zero server returns a page of max size 100. - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to time the template was - created. - - ``update_time``: corresponds to time the template was - last updated. - - ``name``: corresponds to template's name. - - ``display_name``: corresponds to template's display name. - location_id (str): - Deprecated. This field has no effect. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_token = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - order_by = proto.Field( - proto.STRING, - number=4, - ) - location_id = proto.Field( - proto.STRING, - number=5, - ) - - -class ListInspectTemplatesResponse(proto.Message): - r"""Response message for ListInspectTemplates. - Attributes: - inspect_templates (Sequence[google.cloud.dlp_v2.types.InspectTemplate]): - List of inspectTemplates, up to page_size in - ListInspectTemplatesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListInspectTemplates request. - """ - - @property - def raw_page(self): - return self - - inspect_templates = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='InspectTemplate', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteInspectTemplateRequest(proto.Message): - r"""Request message for DeleteInspectTemplate. - Attributes: - name (str): - Required. Resource name of the organization and - inspectTemplate to be deleted, for example - ``organizations/433245324/inspectTemplates/432452342`` or - projects/project-id/inspectTemplates/432452342. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateJobTriggerRequest(proto.Message): - r"""Request message for CreateJobTrigger. - Attributes: - parent (str): - Required. 
Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - Required. The JobTrigger to create. - trigger_id (str): - The trigger id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - job_trigger = proto.Field( - proto.MESSAGE, - number=2, - message='JobTrigger', - ) - trigger_id = proto.Field( - proto.STRING, - number=3, - ) - location_id = proto.Field( - proto.STRING, - number=4, - ) - - -class ActivateJobTriggerRequest(proto.Message): - r"""Request message for ActivateJobTrigger. - Attributes: - name (str): - Required. Resource name of the trigger to activate, for - example ``projects/dlp-test-project/jobTriggers/53234423``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class UpdateJobTriggerRequest(proto.Message): - r"""Request message for UpdateJobTrigger. - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - job_trigger (google.cloud.dlp_v2.types.JobTrigger): - New JobTrigger value. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - job_trigger = proto.Field( - proto.MESSAGE, - number=2, - message='JobTrigger', - ) - update_mask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetJobTriggerRequest(proto.Message): - r"""Request message for GetJobTrigger. - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDlpJobRequest(proto.Message): - r"""Request message for CreateDlpJobRequest. Used to initiate - long running jobs such as calculating risk metrics or inspecting - Google Cloud Storage. - - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - inspect_job (google.cloud.dlp_v2.types.InspectJobConfig): - Set to control what and how to inspect. 
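# A sketch of starting an inspection job with the request message above.
# The bucket, project and location are placeholders, and the call needs
# valid credentials:
from google.cloud import dlp_v2

client = dlp_v2.DlpServiceClient()
job = client.create_dlp_job(
    request=dlp_v2.CreateDlpJobRequest(
        parent="projects/example-project/locations/europe-west3",
        inspect_job=dlp_v2.InspectJobConfig(
            storage_config=dlp_v2.StorageConfig(
                cloud_storage_options=dlp_v2.CloudStorageOptions(
                    file_set=dlp_v2.CloudStorageOptions.FileSet(
                        url="gs://example-bucket/**",
                    ),
                ),
            ),
            inspect_config=dlp_v2.InspectConfig(
                info_types=[dlp_v2.InfoType(name="EMAIL_ADDRESS")],
            ),
        ),
    ),
)
print(job.state)  # typically PENDING right after creation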
- risk_job (google.cloud.dlp_v2.types.RiskAnalysisJobConfig): - Set to choose what metric to calculate. - job_id (str): - The job id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - inspect_job = proto.Field( - proto.MESSAGE, - number=2, - oneof='job', - message='InspectJobConfig', - ) - risk_job = proto.Field( - proto.MESSAGE, - number=3, - oneof='job', - message='RiskAnalysisJobConfig', - ) - job_id = proto.Field( - proto.STRING, - number=4, - ) - location_id = proto.Field( - proto.STRING, - number=5, - ) - - -class ListJobTriggersRequest(proto.Message): - r"""Request message for ListJobTriggers. - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ListJobTriggers. ``order_by`` field must not change for - subsequent calls. - page_size (int): - Size of the page, can be limited by a server. - order_by (str): - Comma separated list of triggeredJob fields to order by, - followed by ``asc`` or ``desc`` postfix. This list is - case-insensitive, default sorting order is ascending, - redundant space characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to time the JobTrigger was - created. - - ``update_time``: corresponds to time the JobTrigger was - last updated. - - ``last_run_time``: corresponds to the last time the - JobTrigger ran. - - ``name``: corresponds to JobTrigger's name. - - ``display_name``: corresponds to JobTrigger's display - name. - - ``status``: corresponds to JobTrigger's status. - filter (str): - Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. - - Supported fields/values for inspect jobs: - - - ``status`` - HEALTHY|PAUSED|CANCELLED - - ``inspected_storage`` - - DATASTORE|CLOUD_STORAGE|BIGQUERY - - 'last_run_time\` - RFC 3339 formatted timestamp, - surrounded by quotation marks. Nanoseconds are - ignored. - - 'error_count' - Number of errors that have occurred - while running. - - - The operator must be ``=`` or ``!=`` for status and - inspected_storage. - - Examples: - - - inspected_storage = cloud_storage AND status = HEALTHY - - inspected_storage = cloud_storage OR inspected_storage = - bigquery - - inspected_storage = cloud_storage AND (state = PAUSED OR - state = HEALTHY) - - last_run_time > "2017-12-12T00:00:00+00:00" - - The length of this field should be no more than 500 - characters. 
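# A sketch of the filter and order_by syntax documented above, via the
# generated client (project id is a placeholder):
from google.cloud import dlp_v2

client = dlp_v2.DlpServiceClient()
pager = client.list_job_triggers(
    request=dlp_v2.ListJobTriggersRequest(
        parent="projects/example-project",
        filter="inspected_storage = cloud_storage AND status = HEALTHY",
        order_by="last_run_time desc",
    ),
)
for job_trigger in pager:  # the pager fetches further pages on demand
    print(job_trigger.name)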
- location_id (str): - Deprecated. This field has no effect. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_token = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - order_by = proto.Field( - proto.STRING, - number=4, - ) - filter = proto.Field( - proto.STRING, - number=5, - ) - location_id = proto.Field( - proto.STRING, - number=7, - ) - - -class ListJobTriggersResponse(proto.Message): - r"""Response message for ListJobTriggers. - Attributes: - job_triggers (Sequence[google.cloud.dlp_v2.types.JobTrigger]): - List of triggeredJobs, up to page_size in - ListJobTriggersRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListJobTriggers request. - """ - - @property - def raw_page(self): - return self - - job_triggers = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='JobTrigger', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteJobTriggerRequest(proto.Message): - r"""Request message for DeleteJobTrigger. - Attributes: - name (str): - Required. Resource name of the project and the triggeredJob, - for example - ``projects/dlp-test-project/jobTriggers/53234423``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class InspectJobConfig(proto.Message): - r"""Controls what and how to inspect for findings. - Attributes: - storage_config (google.cloud.dlp_v2.types.StorageConfig): - The data to scan. - inspect_config (google.cloud.dlp_v2.types.InspectConfig): - How and what to scan for. - inspect_template_name (str): - If provided, will be used as the default for all values in - InspectConfig. ``inspect_config`` will be merged into the - values persisted as part of the template. - actions (Sequence[google.cloud.dlp_v2.types.Action]): - Actions to execute at the completion of the - job. - """ - - storage_config = proto.Field( - proto.MESSAGE, - number=1, - message=storage.StorageConfig, - ) - inspect_config = proto.Field( - proto.MESSAGE, - number=2, - message='InspectConfig', - ) - inspect_template_name = proto.Field( - proto.STRING, - number=3, - ) - actions = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Action', - ) - - -class DlpJob(proto.Message): - r"""Combines all of the information about a DLP job. - Attributes: - name (str): - The server-assigned name. - type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of job. - state (google.cloud.dlp_v2.types.DlpJob.JobState): - State of a job. - risk_details (google.cloud.dlp_v2.types.AnalyzeDataSourceRiskDetails): - Results from analyzing risk of a data source. - inspect_details (google.cloud.dlp_v2.types.InspectDataSourceDetails): - Results from inspecting a data source. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job finished. - job_trigger_name (str): - If created by a job trigger, the resource - name of the trigger that instantiated the job. - errors (Sequence[google.cloud.dlp_v2.types.Error]): - A stream of errors encountered running the - job. - """ - class JobState(proto.Enum): - r"""Possible states of a job. 
New items may be added.""" - JOB_STATE_UNSPECIFIED = 0 - PENDING = 1 - RUNNING = 2 - DONE = 3 - CANCELED = 4 - FAILED = 5 - ACTIVE = 6 - - name = proto.Field( - proto.STRING, - number=1, - ) - type_ = proto.Field( - proto.ENUM, - number=2, - enum='DlpJobType', - ) - state = proto.Field( - proto.ENUM, - number=3, - enum=JobState, - ) - risk_details = proto.Field( - proto.MESSAGE, - number=4, - oneof='details', - message='AnalyzeDataSourceRiskDetails', - ) - inspect_details = proto.Field( - proto.MESSAGE, - number=5, - oneof='details', - message='InspectDataSourceDetails', - ) - create_time = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - start_time = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - end_time = proto.Field( - proto.MESSAGE, - number=8, - message=timestamp_pb2.Timestamp, - ) - job_trigger_name = proto.Field( - proto.STRING, - number=10, - ) - errors = proto.RepeatedField( - proto.MESSAGE, - number=11, - message='Error', - ) - - -class GetDlpJobRequest(proto.Message): - r"""The request message for [DlpJobs.GetDlpJob][]. - Attributes: - name (str): - Required. The name of the DlpJob resource. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDlpJobsRequest(proto.Message): - r"""The request message for listing DLP jobs. - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on whether you - have `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - filter (str): - Allows filtering. - - Supported syntax: - - - Filter expressions are made up of one or more - restrictions. - - Restrictions can be combined by ``AND`` or ``OR`` logical - operators. A sequence of restrictions implicitly uses - ``AND``. - - A restriction has the form of - ``{field} {operator} {value}``. - - Supported fields/values for inspect jobs: - - - ``state`` - PENDING|RUNNING|CANCELED|FINISHED|FAILED - - ``inspected_storage`` - - DATASTORE|CLOUD_STORAGE|BIGQUERY - - ``trigger_name`` - The resource name of the trigger - that created the job. - - 'end_time\` - Corresponds to time the job finished. - - 'start_time\` - Corresponds to time the job started. - - - Supported fields for risk analysis jobs: - - - ``state`` - RUNNING|CANCELED|FINISHED|FAILED - - 'end_time\` - Corresponds to time the job finished. - - 'start_time\` - Corresponds to time the job started. - - - The operator must be ``=`` or ``!=``. - - Examples: - - - inspected_storage = cloud_storage AND state = done - - inspected_storage = cloud_storage OR inspected_storage = - bigquery - - inspected_storage = cloud_storage AND (state = done OR - state = canceled) - - end_time > "2017-12-12T00:00:00+00:00" - - The length of this field should be no more than 500 - characters. - page_size (int): - The standard list page size. - page_token (str): - The standard list page token. - type_ (google.cloud.dlp_v2.types.DlpJobType): - The type of job. Defaults to ``DlpJobType.INSPECT`` - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix.
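# A sketch that lists finished inspection jobs using the request message
# above; the filter string follows the documented examples:
from google.cloud import dlp_v2

client = dlp_v2.DlpServiceClient()
for job in client.list_dlp_jobs(
    request=dlp_v2.ListDlpJobsRequest(
        parent="projects/example-project",
        filter="inspected_storage = cloud_storage AND state = done",
        type_=dlp_v2.DlpJobType.INSPECT_JOB,
    ),
):
    print(job.name, job.state)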
This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc, end_time asc, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to time the job was created. - - ``end_time``: corresponds to time the job ended. - - ``name``: corresponds to job's name. - - ``state``: corresponds to ``state`` - location_id (str): - Deprecated. This field has no effect. - """ - - parent = proto.Field( - proto.STRING, - number=4, - ) - filter = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - type_ = proto.Field( - proto.ENUM, - number=5, - enum='DlpJobType', - ) - order_by = proto.Field( - proto.STRING, - number=6, - ) - location_id = proto.Field( - proto.STRING, - number=7, - ) - - -class ListDlpJobsResponse(proto.Message): - r"""The response message for listing DLP jobs. - Attributes: - jobs (Sequence[google.cloud.dlp_v2.types.DlpJob]): - A list of DlpJobs that matches the specified - filter in the request. - next_page_token (str): - The standard List next-page token. - """ - - @property - def raw_page(self): - return self - - jobs = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DlpJob', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class CancelDlpJobRequest(proto.Message): - r"""The request message for canceling a DLP job. - Attributes: - name (str): - Required. The name of the DlpJob resource to - be cancelled. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class FinishDlpJobRequest(proto.Message): - r"""The request message for finishing a DLP hybrid job. - Attributes: - name (str): - Required. The name of the DlpJob resource to - be cancelled. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteDlpJobRequest(proto.Message): - r"""The request message for deleting a DLP job. - Attributes: - name (str): - Required. The name of the DlpJob resource to - be deleted. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class CreateDeidentifyTemplateRequest(proto.Message): - r"""Request message for CreateDeidentifyTemplate. - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - Required. The DeidentifyTemplate to create. - template_id (str): - The template id can contain uppercase and lowercase letters, - numbers, and hyphens; that is, it must match the regular - expression: ``[a-zA-Z\d-_]+``. The maximum length is 100 - characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. 
This field has no effect. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - deidentify_template = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyTemplate', - ) - template_id = proto.Field( - proto.STRING, - number=3, - ) - location_id = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateDeidentifyTemplateRequest(proto.Message): - r"""Request message for UpdateDeidentifyTemplate. - Attributes: - name (str): - Required. Resource name of organization and deidentify - template to be updated, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - deidentify_template (google.cloud.dlp_v2.types.DeidentifyTemplate): - New DeidentifyTemplate value. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - deidentify_template = proto.Field( - proto.MESSAGE, - number=2, - message='DeidentifyTemplate', - ) - update_mask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetDeidentifyTemplateRequest(proto.Message): - r"""Request message for GetDeidentifyTemplate. - Attributes: - name (str): - Required. Resource name of the organization and deidentify - template to be read, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListDeidentifyTemplatesRequest(proto.Message): - r"""Request message for ListDeidentifyTemplates. - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ``ListDeidentifyTemplates``. - page_size (int): - Size of the page, can be limited by server. - If zero server returns a page of max size 100. - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc,update_time, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to time the template was - created. - - ``update_time``: corresponds to time the template was - last updated. - - ``name``: corresponds to template's name. - - ``display_name``: corresponds to template's display name. - location_id (str): - Deprecated. This field has no effect. 
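For context, a minimal sketch of how ``CreateDeidentifyTemplateRequest`` is typically exercised through the generated client; this assumes the ``google.cloud.dlp_v2`` surface staged in this diff, and the project ID and transformation choice below are placeholders:

.. code-block:: python

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()

    # Replace every detected finding with its infoType name -- a simple,
    # irreversible transformation that is convenient for demonstrations.
    template = dlp_v2.DeidentifyTemplate(
        display_name="mask-findings",
        deidentify_config=dlp_v2.DeidentifyConfig(
            info_type_transformations=dlp_v2.InfoTypeTransformations(
                transformations=[
                    dlp_v2.InfoTypeTransformations.InfoTypeTransformation(
                        primitive_transformation=dlp_v2.PrimitiveTransformation(
                            replace_with_info_type_config=dlp_v2.ReplaceWithInfoTypeConfig(),
                        ),
                    ),
                ],
            ),
        ),
    )
    response = client.create_deidentify_template(
        parent="projects/example-project/locations/europe-west3",
        deidentify_template=template,
    )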
- """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_token = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - order_by = proto.Field( - proto.STRING, - number=4, - ) - location_id = proto.Field( - proto.STRING, - number=5, - ) - - -class ListDeidentifyTemplatesResponse(proto.Message): - r"""Response message for ListDeidentifyTemplates. - Attributes: - deidentify_templates (Sequence[google.cloud.dlp_v2.types.DeidentifyTemplate]): - List of deidentify templates, up to page_size in - ListDeidentifyTemplatesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListDeidentifyTemplates request. - """ - - @property - def raw_page(self): - return self - - deidentify_templates = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='DeidentifyTemplate', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteDeidentifyTemplateRequest(proto.Message): - r"""Request message for DeleteDeidentifyTemplate. - Attributes: - name (str): - Required. Resource name of the organization and deidentify - template to be deleted, for example - ``organizations/433245324/deidentifyTemplates/432452342`` or - projects/project-id/deidentifyTemplates/432452342. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class LargeCustomDictionaryConfig(proto.Message): - r"""Configuration for a custom dictionary created from a data source of - any size up to the maximum size defined in the - `limits `__ page. The artifacts - of dictionary creation are stored in the specified Google Cloud - Storage location. Consider using ``CustomInfoType.Dictionary`` for - smaller dictionaries that satisfy the size requirements. - - Attributes: - output_path (google.cloud.dlp_v2.types.CloudStoragePath): - Location to store dictionary artifacts in - Google Cloud Storage. These files will only be - accessible by project owners and the DLP API. If - any of these artifacts are modified, the - dictionary is considered invalid and can no - longer be used. - cloud_storage_file_set (google.cloud.dlp_v2.types.CloudStorageFileSet): - Set of files containing newline-delimited - lists of dictionary phrases. - big_query_field (google.cloud.dlp_v2.types.BigQueryField): - Field in a BigQuery table where each cell - represents a dictionary phrase. - """ - - output_path = proto.Field( - proto.MESSAGE, - number=1, - message=storage.CloudStoragePath, - ) - cloud_storage_file_set = proto.Field( - proto.MESSAGE, - number=2, - oneof='source', - message=storage.CloudStorageFileSet, - ) - big_query_field = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message=storage.BigQueryField, - ) - - -class LargeCustomDictionaryStats(proto.Message): - r"""Summary statistics of a custom dictionary. - Attributes: - approx_num_phrases (int): - Approximate number of distinct phrases in the - dictionary. - """ - - approx_num_phrases = proto.Field( - proto.INT64, - number=1, - ) - - -class StoredInfoTypeConfig(proto.Message): - r"""Configuration for stored infoTypes. All fields and subfield - are provided by the user. For more information, see - https://cloud.google.com/dlp/docs/creating-custom-infotypes. - - Attributes: - display_name (str): - Display name of the StoredInfoType (max 256 - characters). - description (str): - Description of the StoredInfoType (max 256 - characters). 
- large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryConfig): - StoredInfoType where findings are defined by - a dictionary of phrases. - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - Store dictionary-based CustomInfoType. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Store regular expression-based - StoredInfoType. - """ - - display_name = proto.Field( - proto.STRING, - number=1, - ) - description = proto.Field( - proto.STRING, - number=2, - ) - large_custom_dictionary = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='LargeCustomDictionaryConfig', - ) - dictionary = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=storage.CustomInfoType.Dictionary, - ) - regex = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message=storage.CustomInfoType.Regex, - ) - - -class StoredInfoTypeStats(proto.Message): - r"""Statistics for a StoredInfoType. - Attributes: - large_custom_dictionary (google.cloud.dlp_v2.types.LargeCustomDictionaryStats): - StoredInfoType where findings are defined by - a dictionary of phrases. - """ - - large_custom_dictionary = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message='LargeCustomDictionaryStats', - ) - - -class StoredInfoTypeVersion(proto.Message): - r"""Version of a StoredInfoType, including the configuration used - to build it, create timestamp, and current state. - - Attributes: - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - StoredInfoType configuration. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Create timestamp of the version. Read-only, - determined by the system when the version is - created. - state (google.cloud.dlp_v2.types.StoredInfoTypeState): - Stored info type version state. Read-only, - updated by the system during dictionary - creation. - errors (Sequence[google.cloud.dlp_v2.types.Error]): - Errors that occurred when creating this storedInfoType - version, or anomalies detected in the storedInfoType data - that render it unusable. Only the five most recent errors - will be displayed, with the most recent error appearing - first. - - For example, some of the data for stored custom dictionaries - is put in the user's Google Cloud Storage bucket, and if - this data is modified or deleted by the user or another - system, the dictionary becomes invalid. - - If any errors occur, fix the problem indicated by the error - message and use the UpdateStoredInfoType API method to - create another version of the storedInfoType to continue - using it, reusing the same ``config`` if it was not the - source of the error. - stats (google.cloud.dlp_v2.types.StoredInfoTypeStats): - Statistics about this storedInfoType version. - """ - - config = proto.Field( - proto.MESSAGE, - number=1, - message='StoredInfoTypeConfig', - ) - create_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - state = proto.Field( - proto.ENUM, - number=3, - enum='StoredInfoTypeState', - ) - errors = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='Error', - ) - stats = proto.Field( - proto.MESSAGE, - number=5, - message='StoredInfoTypeStats', - ) - - -class StoredInfoType(proto.Message): - r"""StoredInfoType resource message that contains information - about the current version and any pending updates. - - Attributes: - name (str): - Resource name. - current_version (google.cloud.dlp_v2.types.StoredInfoTypeVersion): - Current version of the stored info type. 
- pending_versions (Sequence[google.cloud.dlp_v2.types.StoredInfoTypeVersion]): - Pending versions of the stored info type. - Empty if no versions are pending. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - current_version = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeVersion', - ) - pending_versions = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='StoredInfoTypeVersion', - ) - - -class CreateStoredInfoTypeRequest(proto.Message): - r"""Request message for CreateStoredInfoType. - Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Required. Configuration of the storedInfoType - to create. - stored_info_type_id (str): - The storedInfoType ID can contain uppercase and lowercase - letters, numbers, and hyphens; that is, it must match the - regular expression: ``[a-zA-Z\d-_]+``. The maximum length is - 100 characters. Can be empty to allow the system to generate - one. - location_id (str): - Deprecated. This field has no effect. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - config = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeConfig', - ) - stored_info_type_id = proto.Field( - proto.STRING, - number=3, - ) - location_id = proto.Field( - proto.STRING, - number=4, - ) - - -class UpdateStoredInfoTypeRequest(proto.Message): - r"""Request message for UpdateStoredInfoType. - Attributes: - name (str): - Required. Resource name of organization and storedInfoType - to be updated, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - config (google.cloud.dlp_v2.types.StoredInfoTypeConfig): - Updated configuration for the storedInfoType. - If not provided, a new version of the - storedInfoType will be created with the existing - configuration. - update_mask (google.protobuf.field_mask_pb2.FieldMask): - Mask to control which fields get updated. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - config = proto.Field( - proto.MESSAGE, - number=2, - message='StoredInfoTypeConfig', - ) - update_mask = proto.Field( - proto.MESSAGE, - number=3, - message=field_mask_pb2.FieldMask, - ) - - -class GetStoredInfoTypeRequest(proto.Message): - r"""Request message for GetStoredInfoType. - Attributes: - name (str): - Required. Resource name of the organization and - storedInfoType to be read, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListStoredInfoTypesRequest(proto.Message): - r"""Request message for ListStoredInfoTypes. 
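A sketch of creating a stored infoType from these messages; it assumes the generated client, and the bucket, project, and ID are placeholders (``stored_info_type_id`` is passed via the ``request`` dict rather than as a flattened keyword):

.. code-block:: python

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    config = dlp_v2.StoredInfoTypeConfig(
        display_name="Employee IDs",
        large_custom_dictionary=dlp_v2.LargeCustomDictionaryConfig(
            # Dictionary artifacts land here; modifying them invalidates
            # the dictionary, per the docs above.
            output_path=dlp_v2.CloudStoragePath(path="gs://example-bucket/dlp-dict/"),
            cloud_storage_file_set=dlp_v2.CloudStorageFileSet(
                url="gs://example-bucket/employee_ids.txt",
            ),
        ),
    )
    response = client.create_stored_info_type(
        request={
            "parent": "projects/example-project/locations/global",
            "config": config,
            "stored_info_type_id": "employee-ids",
        },
    )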
- Attributes: - parent (str): - Required. Parent resource name. - - The format of this value varies depending on the scope of - the request (project or organization) and whether you have - `specified a processing - location `__: - - - Projects scope, location specified: - ``projects/``\ PROJECT_ID\ ``/locations/``\ LOCATION_ID - - Projects scope, no location specified (defaults to - global): ``projects/``\ PROJECT_ID - - Organizations scope, location specified: - ``organizations/``\ ORG_ID\ ``/locations/``\ LOCATION_ID - - Organizations scope, no location specified (defaults to - global): ``organizations/``\ ORG_ID - - The following example ``parent`` string specifies a parent - project with the identifier ``example-project``, and - specifies the ``europe-west3`` location for processing data: - - :: - - parent=projects/example-project/locations/europe-west3 - page_token (str): - Page token to continue retrieval. Comes from previous call - to ``ListStoredInfoTypes``. - page_size (int): - Size of the page, can be limited by server. - If zero server returns a page of max size 100. - order_by (str): - Comma separated list of fields to order by, followed by - ``asc`` or ``desc`` postfix. This list is case-insensitive, - default sorting order is ascending, redundant space - characters are insignificant. - - Example: ``name asc, display_name, create_time desc`` - - Supported fields are: - - - ``create_time``: corresponds to time the most recent - version of the resource was created. - - ``state``: corresponds to the state of the resource. - - ``name``: corresponds to resource name. - - ``display_name``: corresponds to info type's display - name. - location_id (str): - Deprecated. This field has no effect. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_token = proto.Field( - proto.STRING, - number=2, - ) - page_size = proto.Field( - proto.INT32, - number=3, - ) - order_by = proto.Field( - proto.STRING, - number=4, - ) - location_id = proto.Field( - proto.STRING, - number=5, - ) - - -class ListStoredInfoTypesResponse(proto.Message): - r"""Response message for ListStoredInfoTypes. - Attributes: - stored_info_types (Sequence[google.cloud.dlp_v2.types.StoredInfoType]): - List of storedInfoTypes, up to page_size in - ListStoredInfoTypesRequest. - next_page_token (str): - If the next page is available then the next - page token to be used in following - ListStoredInfoTypes request. - """ - - @property - def raw_page(self): - return self - - stored_info_types = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='StoredInfoType', - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class DeleteStoredInfoTypeRequest(proto.Message): - r"""Request message for DeleteStoredInfoType. - Attributes: - name (str): - Required. Resource name of the organization and - storedInfoType to be deleted, for example - ``organizations/433245324/storedInfoTypes/432452342`` or - projects/project-id/storedInfoTypes/432452342. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class HybridInspectJobTriggerRequest(proto.Message): - r"""Request to search for potentially sensitive info in a custom - location. - - Attributes: - name (str): - Required. Resource name of the trigger to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/jobTriggers/53234423``. - hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): - The item to inspect. 
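A sketch of feeding an item to a hybrid trigger via this request; the trigger name mirrors the example above, and the content and labels are placeholders:

.. code-block:: python

    from google.cloud import dlp_v2

    client = dlp_v2.DlpServiceClient()
    client.hybrid_inspect_job_trigger(
        request={
            "name": "projects/dlp-test-project/jobTriggers/53234423",
            "hybrid_item": dlp_v2.HybridContentItem(
                item=dlp_v2.ContentItem(value="ssn: 372-81-9127"),
                finding_details=dlp_v2.HybridFindingDetails(
                    labels={"pipeline": "etl"},
                ),
            ),
        },
    )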
- """ - - name = proto.Field( - proto.STRING, - number=1, - ) - hybrid_item = proto.Field( - proto.MESSAGE, - number=3, - message='HybridContentItem', - ) - - -class HybridInspectDlpJobRequest(proto.Message): - r"""Request to search for potentially sensitive info in a custom - location. - - Attributes: - name (str): - Required. Resource name of the job to execute a hybrid - inspect on, for example - ``projects/dlp-test-project/dlpJob/53234423``. - hybrid_item (google.cloud.dlp_v2.types.HybridContentItem): - The item to inspect. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - hybrid_item = proto.Field( - proto.MESSAGE, - number=3, - message='HybridContentItem', - ) - - -class HybridContentItem(proto.Message): - r"""An individual hybrid item to inspect. Will be stored - temporarily during processing. - - Attributes: - item (google.cloud.dlp_v2.types.ContentItem): - The item to inspect. - finding_details (google.cloud.dlp_v2.types.HybridFindingDetails): - Supplementary information that will be added - to each finding. - """ - - item = proto.Field( - proto.MESSAGE, - number=1, - message='ContentItem', - ) - finding_details = proto.Field( - proto.MESSAGE, - number=2, - message='HybridFindingDetails', - ) - - -class HybridFindingDetails(proto.Message): - r"""Populate to associate additional data with each finding. - Attributes: - container_details (google.cloud.dlp_v2.types.Container): - Details about the container where the content - being inspected is from. - file_offset (int): - Offset in bytes of the line, from the - beginning of the file, where the finding is - located. Populate if the item being scanned is - only part of a bigger item, such as a shard of a - file and you want to track the absolute position - of the finding. - row_offset (int): - Offset of the row for tables. Populate if the - row(s) being scanned are part of a bigger - dataset and you want to keep track of their - absolute position. - table_options (google.cloud.dlp_v2.types.TableOptions): - If the container is a table, additional information to make - findings meaningful such as the columns that are primary - keys. If not known ahead of time, can also be set within - each inspect hybrid call and the two will be merged. Note - that identifying_fields will only be stored to BigQuery, and - only if the BigQuery action has been included. - labels (Sequence[google.cloud.dlp_v2.types.HybridFindingDetails.LabelsEntry]): - Labels to represent user provided metadata about the data - being inspected. If configured by the job, some key values - may be required. The labels associated with ``Finding``'s - produced by hybrid inspection. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. 
- - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - """ - - container_details = proto.Field( - proto.MESSAGE, - number=1, - message='Container', - ) - file_offset = proto.Field( - proto.INT64, - number=2, - ) - row_offset = proto.Field( - proto.INT64, - number=3, - ) - table_options = proto.Field( - proto.MESSAGE, - number=4, - message=storage.TableOptions, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=5, - ) - - -class HybridInspectResponse(proto.Message): - r"""Quota exceeded errors will be thrown once quota has been met. """ - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py b/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py deleted file mode 100644 index 9d33cb03..00000000 --- a/owl-bot-staging/v2/google/cloud/dlp_v2/types/storage.py +++ /dev/null @@ -1,1202 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.privacy.dlp.v2', - manifest={ - 'Likelihood', - 'FileType', - 'InfoType', - 'StoredType', - 'CustomInfoType', - 'FieldId', - 'PartitionId', - 'KindExpression', - 'DatastoreOptions', - 'CloudStorageRegexFileSet', - 'CloudStorageOptions', - 'CloudStorageFileSet', - 'CloudStoragePath', - 'BigQueryOptions', - 'StorageConfig', - 'HybridOptions', - 'BigQueryKey', - 'DatastoreKey', - 'Key', - 'RecordKey', - 'BigQueryTable', - 'BigQueryField', - 'EntityId', - 'TableOptions', - }, -) - - -class Likelihood(proto.Enum): - r"""Categorization of results based on how likely they are to - represent a match, based on the number of elements they contain - which imply a match. - """ - LIKELIHOOD_UNSPECIFIED = 0 - VERY_UNLIKELY = 1 - UNLIKELY = 2 - POSSIBLE = 3 - LIKELY = 4 - VERY_LIKELY = 5 - - -class FileType(proto.Enum): - r"""Definitions of file type groups to scan. New types will be - added to this list. - """ - FILE_TYPE_UNSPECIFIED = 0 - BINARY_FILE = 1 - TEXT_FILE = 2 - IMAGE = 3 - WORD = 5 - PDF = 6 - AVRO = 7 - CSV = 8 - TSV = 9 - - -class InfoType(proto.Message): - r"""Type of information detected by the API. - Attributes: - name (str): - Name of the information type. Either a name of your choosing - when creating a CustomInfoType, or one of the names listed - at https://cloud.google.com/dlp/docs/infotypes-reference - when specifying a built-in type. When sending Cloud DLP - results to Data Catalog, infoType names should conform to - the pattern ``[A-Za-z0-9$-_]{1,64}``. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class StoredType(proto.Message): - r"""A reference to a StoredInfoType to use with scanning. - Attributes: - name (str): - Resource name of the requested ``StoredInfoType``, for - example - ``organizations/433245324/storedInfoTypes/432452342`` or - ``projects/project-id/storedInfoTypes/432452342``. 
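Once created, a ``StoredInfoType`` is referenced by name from a ``CustomInfoType``; a sketch with a placeholder resource name (note the ``CustomInfoType`` docs below: stored types work in ``InspectDataSource`` but are not currently supported in ``InspectContent``):

.. code-block:: python

    from google.cloud import dlp_v2

    custom_type = dlp_v2.CustomInfoType(
        info_type=dlp_v2.InfoType(name="EMPLOYEE_ID"),
        stored_type=dlp_v2.StoredType(
            name="projects/example-project/storedInfoTypes/employee-ids",
        ),
    )
    inspect_config = dlp_v2.InspectConfig(custom_info_types=[custom_type])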
- create_time (google.protobuf.timestamp_pb2.Timestamp): - Timestamp indicating when the version of the - ``StoredInfoType`` used for inspection was created. - Output-only field, populated by the system. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - create_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - - -class CustomInfoType(proto.Message): - r"""Custom information type provided by the user. Used to find - domain-specific sensitive information configurable to the data - in question. - - Attributes: - info_type (google.cloud.dlp_v2.types.InfoType): - CustomInfoType can either be a new infoType, or an extension - of built-in infoType, when the name matches one of existing - infoTypes and that infoType is specified in - ``InspectContent.info_types`` field. Specifying the latter - adds findings to the one detected by the system. If built-in - info type is not specified in ``InspectContent.info_types`` - list then the name is treated as a custom info type. - likelihood (google.cloud.dlp_v2.types.Likelihood): - Likelihood to return for this CustomInfoType. This base - value can be altered by a detection rule if the finding - meets the criteria specified by the rule. Defaults to - ``VERY_LIKELY`` if not specified. - dictionary (google.cloud.dlp_v2.types.CustomInfoType.Dictionary): - A list of phrases to detect as a - CustomInfoType. - regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression based CustomInfoType. - surrogate_type (google.cloud.dlp_v2.types.CustomInfoType.SurrogateType): - Message for detecting output from - deidentification transformations that support - reversing. - stored_type (google.cloud.dlp_v2.types.StoredType): - Load an existing ``StoredInfoType`` resource for use in - ``InspectDataSource``. Not currently supported in - ``InspectContent``. - detection_rules (Sequence[google.cloud.dlp_v2.types.CustomInfoType.DetectionRule]): - Set of detection rules to apply to all findings of this - CustomInfoType. Rules are applied in order that they are - specified. Not supported for the ``surrogate_type`` - CustomInfoType. - exclusion_type (google.cloud.dlp_v2.types.CustomInfoType.ExclusionType): - If set to EXCLUSION_TYPE_EXCLUDE this infoType will not - cause a finding to be returned. It still can be used for - rules matching. - """ - class ExclusionType(proto.Enum): - r"""""" - EXCLUSION_TYPE_UNSPECIFIED = 0 - EXCLUSION_TYPE_EXCLUDE = 1 - - class Dictionary(proto.Message): - r"""Custom information type based on a dictionary of words or phrases. - This can be used to match sensitive information specific to the - data, such as a list of employee IDs or job titles. - - Dictionary words are case-insensitive and all characters other than - letters and digits in the unicode `Basic Multilingual - Plane `__ - will be replaced with whitespace when scanning for matches, so the - dictionary phrase "Sam Johnson" will match all three phrases "sam - johnson", "Sam, Johnson", and "Sam (Johnson)". Additionally, the - characters surrounding any match must be of a different type than - the adjacent characters within the word, so letters must be next to - non-letters and digits next to non-digits. For example, the - dictionary word "jen" will match the first three letters of the text - "jen123" but will return no matches for "jennifer". - - Dictionary words containing a large number of characters that are - not letters or digits may result in unexpected findings because such - characters are treated as whitespace. 
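The matching semantics above are easiest to see with a small inline word list; a sketch:

.. code-block:: python

    from google.cloud import dlp_v2

    # Per the docs above, "Sam Johnson" will also match "sam johnson",
    # "Sam, Johnson", and "Sam (Johnson)".
    custom_type = dlp_v2.CustomInfoType(
        info_type=dlp_v2.InfoType(name="VIP_NAME"),
        likelihood=dlp_v2.Likelihood.LIKELY,
        dictionary=dlp_v2.CustomInfoType.Dictionary(
            word_list=dlp_v2.CustomInfoType.Dictionary.WordList(
                words=["Sam Johnson", "Jen Smith"],
            ),
        ),
    )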
The - `limits `__ page contains - details about the size limits of dictionaries. For dictionaries that - do not fit within these constraints, consider using - ``LargeCustomDictionaryConfig`` in the ``StoredInfoType`` API. - - Attributes: - word_list (google.cloud.dlp_v2.types.CustomInfoType.Dictionary.WordList): - List of words or phrases to search for. - cloud_storage_path (google.cloud.dlp_v2.types.CloudStoragePath): - Newline-delimited file of words in Cloud - Storage. Only a single file is accepted. - """ - - class WordList(proto.Message): - r"""Message defining a list of words or phrases to search for in - the data. - - Attributes: - words (Sequence[str]): - Words or phrases defining the dictionary. The dictionary - must contain at least one phrase and every phrase must - contain at least 2 characters that are letters or digits. - [required] - """ - - words = proto.RepeatedField( - proto.STRING, - number=1, - ) - - word_list = proto.Field( - proto.MESSAGE, - number=1, - oneof='source', - message='CustomInfoType.Dictionary.WordList', - ) - cloud_storage_path = proto.Field( - proto.MESSAGE, - number=3, - oneof='source', - message='CloudStoragePath', - ) - - class Regex(proto.Message): - r"""Message defining a custom regular expression. - Attributes: - pattern (str): - Pattern defining the regular expression. Its - syntax - (https://github.com/google/re2/wiki/Syntax) can - be found under the google/re2 repository on - GitHub. - group_indexes (Sequence[int]): - The index of the submatch to extract as - findings. When not specified, the entire match - is returned. No more than 3 may be included. - """ - - pattern = proto.Field( - proto.STRING, - number=1, - ) - group_indexes = proto.RepeatedField( - proto.INT32, - number=2, - ) - - class SurrogateType(proto.Message): - r"""Message for detecting output from deidentification transformations - such as - ```CryptoReplaceFfxFpeConfig`` `__. - These types of transformations are those that perform - pseudonymization, thereby producing a "surrogate" as output. This - should be used in conjunction with a field on the transformation - such as ``surrogate_info_type``. This CustomInfoType does not - support the use of ``detection_rules``. - """ - - class DetectionRule(proto.Message): - r"""Deprecated; use ``InspectionRuleSet`` instead. Rule for modifying a - ``CustomInfoType`` to alter behavior under certain circumstances, - depending on the specific details of the rule. Not supported for the - ``surrogate_type`` custom infoType. - - Attributes: - hotword_rule (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.HotwordRule): - Hotword-based detection rule. - """ - - class Proximity(proto.Message): - r"""Message for specifying a window around a finding to apply a - detection rule. - - Attributes: - window_before (int): - Number of characters before the finding to - consider. - window_after (int): - Number of characters after the finding to - consider. - """ - - window_before = proto.Field( - proto.INT32, - number=1, - ) - window_after = proto.Field( - proto.INT32, - number=2, - ) - - class LikelihoodAdjustment(proto.Message): - r"""Message for specifying an adjustment to the likelihood of a - finding as part of a detection rule. - - Attributes: - fixed_likelihood (google.cloud.dlp_v2.types.Likelihood): - Set the likelihood of a finding to a fixed - value. - relative_likelihood (int): - Increase or decrease the likelihood by the specified number - of levels. 
For example, if a finding would be ``POSSIBLE`` - without the detection rule and ``relative_likelihood`` is 1, - then it is upgraded to ``LIKELY``, while a value of -1 would - downgrade it to ``UNLIKELY``. Likelihood may never drop - below ``VERY_UNLIKELY`` or exceed ``VERY_LIKELY``, so - applying an adjustment of 1 followed by an adjustment of -1 - when base likelihood is ``VERY_LIKELY`` will result in a - final likelihood of ``LIKELY``. - """ - - fixed_likelihood = proto.Field( - proto.ENUM, - number=1, - oneof='adjustment', - enum='Likelihood', - ) - relative_likelihood = proto.Field( - proto.INT32, - number=2, - oneof='adjustment', - ) - - class HotwordRule(proto.Message): - r"""The rule that adjusts the likelihood of findings within a - certain proximity of hotwords. - - Attributes: - hotword_regex (google.cloud.dlp_v2.types.CustomInfoType.Regex): - Regular expression pattern defining what - qualifies as a hotword. - proximity (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.Proximity): - Proximity of the finding within which the - entire hotword must reside. The total length of - the window cannot exceed 1000 characters. Note - that the finding itself will be included in the - window, so that hotwords may be used to match - substrings of the finding itself. For example, - the certainty of a phone number regex "\(\d{3}\) - \d{3}-\d{4}" could be adjusted upwards if the - area code is known to be the local area code of - a company office using the hotword regex - "\(xxx\)", where "xxx" is the area code in - question. - likelihood_adjustment (google.cloud.dlp_v2.types.CustomInfoType.DetectionRule.LikelihoodAdjustment): - Likelihood adjustment to apply to all - matching findings. - """ - - hotword_regex = proto.Field( - proto.MESSAGE, - number=1, - message='CustomInfoType.Regex', - ) - proximity = proto.Field( - proto.MESSAGE, - number=2, - message='CustomInfoType.DetectionRule.Proximity', - ) - likelihood_adjustment = proto.Field( - proto.MESSAGE, - number=3, - message='CustomInfoType.DetectionRule.LikelihoodAdjustment', - ) - - hotword_rule = proto.Field( - proto.MESSAGE, - number=1, - oneof='type', - message='CustomInfoType.DetectionRule.HotwordRule', - ) - - info_type = proto.Field( - proto.MESSAGE, - number=1, - message='InfoType', - ) - likelihood = proto.Field( - proto.ENUM, - number=6, - enum='Likelihood', - ) - dictionary = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message=Dictionary, - ) - regex = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message=Regex, - ) - surrogate_type = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message=SurrogateType, - ) - stored_type = proto.Field( - proto.MESSAGE, - number=5, - oneof='type', - message='StoredType', - ) - detection_rules = proto.RepeatedField( - proto.MESSAGE, - number=7, - message=DetectionRule, - ) - exclusion_type = proto.Field( - proto.ENUM, - number=8, - enum=ExclusionType, - ) - - -class FieldId(proto.Message): - r"""General identifier of a data field in a storage service. - Attributes: - name (str): - Name describing the field. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class PartitionId(proto.Message): - r"""Datastore partition ID. - A partition ID identifies a grouping of entities. The grouping - is always by project and namespace, however the namespace ID may - be empty. - A partition ID contains several dimensions: - project ID and namespace ID. - - Attributes: - project_id (str): - The ID of the project to which the entities - belong. 
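A sketch wiring the detection-rule messages above together; the hotword regex and window size are placeholders:

.. code-block:: python

    from google.cloud import dlp_v2

    # Raise findings to VERY_LIKELY when "patient" occurs in the
    # 50 characters before them.
    rule = dlp_v2.CustomInfoType.DetectionRule(
        hotword_rule=dlp_v2.CustomInfoType.DetectionRule.HotwordRule(
            hotword_regex=dlp_v2.CustomInfoType.Regex(pattern="patient"),
            proximity=dlp_v2.CustomInfoType.DetectionRule.Proximity(
                window_before=50,
            ),
            likelihood_adjustment=dlp_v2.CustomInfoType.DetectionRule.LikelihoodAdjustment(
                fixed_likelihood=dlp_v2.Likelihood.VERY_LIKELY,
            ),
        ),
    )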
- namespace_id (str): - If not empty, the ID of the namespace to - which the entities belong. - """ - - project_id = proto.Field( - proto.STRING, - number=2, - ) - namespace_id = proto.Field( - proto.STRING, - number=4, - ) - - -class KindExpression(proto.Message): - r"""A representation of a Datastore kind. - Attributes: - name (str): - The name of the kind. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class DatastoreOptions(proto.Message): - r"""Options defining a data set within Google Cloud Datastore. - Attributes: - partition_id (google.cloud.dlp_v2.types.PartitionId): - A partition ID identifies a grouping of - entities. The grouping is always by project and - namespace, however the namespace ID may be - empty. - kind (google.cloud.dlp_v2.types.KindExpression): - The kind to process. - """ - - partition_id = proto.Field( - proto.MESSAGE, - number=1, - message='PartitionId', - ) - kind = proto.Field( - proto.MESSAGE, - number=2, - message='KindExpression', - ) - - -class CloudStorageRegexFileSet(proto.Message): - r"""Message representing a set of files in a Cloud Storage bucket. - Regular expressions are used to allow fine-grained control over - which files in the bucket to include. - - Included files are those that match at least one item in - ``include_regex`` and do not match any items in ``exclude_regex``. - Note that a file that matches items from both lists will *not* be - included. For a match to occur, the entire file path (i.e., - everything in the url after the bucket name) must match the regular - expression. - - For example, given the input - ``{bucket_name: "mybucket", include_regex: ["directory1/.*"], exclude_regex: ["directory1/excluded.*"]}``: - - - ``gs://mybucket/directory1/myfile`` will be included - - ``gs://mybucket/directory1/directory2/myfile`` will be included - (``.*`` matches across ``/``) - - ``gs://mybucket/directory0/directory1/myfile`` will *not* be - included (the full path doesn't match any items in - ``include_regex``) - - ``gs://mybucket/directory1/excludedfile`` will *not* be included - (the path matches an item in ``exclude_regex``) - - If ``include_regex`` is left empty, it will match all files by - default (this is equivalent to setting ``include_regex: [".*"]``). - - Some other common use cases: - - - ``{bucket_name: "mybucket", exclude_regex: [".*\.pdf"]}`` will - include all files in ``mybucket`` except for .pdf files - - ``{bucket_name: "mybucket", include_regex: ["directory/[^/]+"]}`` - will include all files directly under - ``gs://mybucket/directory/``, without matching across ``/`` - - Attributes: - bucket_name (str): - The name of a Cloud Storage bucket. Required. - include_regex (Sequence[str]): - A list of regular expressions matching file paths to - include. All files in the bucket that match at least one of - these regular expressions will be included in the set of - files, except for those that also match an item in - ``exclude_regex``. Leaving this field empty will match all - files by default (this is equivalent to including ``.*`` in - the list). - - Regular expressions use RE2 - `syntax `__; a - guide can be found under the google/re2 repository on - GitHub. - exclude_regex (Sequence[str]): - A list of regular expressions matching file paths to - exclude. All files in the bucket that match at least one of - these regular expressions will be excluded from the scan. - - Regular expressions use RE2 - `syntax `__; a - guide can be found under the google/re2 repository on - GitHub. 
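The worked example above translates directly into the message; a sketch:

.. code-block:: python

    from google.cloud import dlp_v2

    # Matches everything under directory1/ in gs://mybucket, except
    # paths matching directory1/excluded.* -- exactly the example above.
    file_set = dlp_v2.CloudStorageRegexFileSet(
        bucket_name="mybucket",
        include_regex=["directory1/.*"],
        exclude_regex=["directory1/excluded.*"],
    )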
- """ - - bucket_name = proto.Field( - proto.STRING, - number=1, - ) - include_regex = proto.RepeatedField( - proto.STRING, - number=2, - ) - exclude_regex = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class CloudStorageOptions(proto.Message): - r"""Options defining a file or a set of files within a Google - Cloud Storage bucket. - - Attributes: - file_set (google.cloud.dlp_v2.types.CloudStorageOptions.FileSet): - The set of one or more files to scan. - bytes_limit_per_file (int): - Max number of bytes to scan from a file. If a scanned file's - size is bigger than this value then the rest of the bytes - are omitted. Only one of bytes_limit_per_file and - bytes_limit_per_file_percent can be specified. - bytes_limit_per_file_percent (int): - Max percentage of bytes to scan from a file. The rest are - omitted. The number of bytes scanned is rounded down. Must - be between 0 and 100, inclusively. Both 0 and 100 means no - limit. Defaults to 0. Only one of bytes_limit_per_file and - bytes_limit_per_file_percent can be specified. - file_types (Sequence[google.cloud.dlp_v2.types.FileType]): - List of file type groups to include in the scan. If empty, - all files are scanned and available data format processors - are applied. In addition, the binary content of the selected - files is always scanned as well. Images are scanned only as - binary if the specified region does not support image - inspection and no file_types were specified. Image - inspection is restricted to 'global', 'us', 'asia', and - 'europe'. - sample_method (google.cloud.dlp_v2.types.CloudStorageOptions.SampleMethod): - - files_limit_percent (int): - Limits the number of files to scan to this - percentage of the input FileSet. Number of files - scanned is rounded down. Must be between 0 and - 100, inclusively. Both 0 and 100 means no limit. - Defaults to 0. - """ - class SampleMethod(proto.Enum): - r"""How to sample bytes if not all bytes are scanned. Meaningful only - when used in conjunction with bytes_limit_per_file. If not - specified, scanning would start from the top. - """ - SAMPLE_METHOD_UNSPECIFIED = 0 - TOP = 1 - RANDOM_START = 2 - - class FileSet(proto.Message): - r"""Set of files to scan. - Attributes: - url (str): - The Cloud Storage url of the file(s) to scan, in the format - ``gs:///``. Trailing wildcard in the path is - allowed. - - If the url ends in a trailing slash, the bucket or directory - represented by the url will be scanned non-recursively - (content in sub-directories will not be scanned). This means - that ``gs://mybucket/`` is equivalent to - ``gs://mybucket/*``, and ``gs://mybucket/directory/`` is - equivalent to ``gs://mybucket/directory/*``. - - Exactly one of ``url`` or ``regex_file_set`` must be set. - regex_file_set (google.cloud.dlp_v2.types.CloudStorageRegexFileSet): - The regex-filtered set of files to scan. Exactly one of - ``url`` or ``regex_file_set`` must be set. 
- """ - - url = proto.Field( - proto.STRING, - number=1, - ) - regex_file_set = proto.Field( - proto.MESSAGE, - number=2, - message='CloudStorageRegexFileSet', - ) - - file_set = proto.Field( - proto.MESSAGE, - number=1, - message=FileSet, - ) - bytes_limit_per_file = proto.Field( - proto.INT64, - number=4, - ) - bytes_limit_per_file_percent = proto.Field( - proto.INT32, - number=8, - ) - file_types = proto.RepeatedField( - proto.ENUM, - number=5, - enum='FileType', - ) - sample_method = proto.Field( - proto.ENUM, - number=6, - enum=SampleMethod, - ) - files_limit_percent = proto.Field( - proto.INT32, - number=7, - ) - - -class CloudStorageFileSet(proto.Message): - r"""Message representing a set of files in Cloud Storage. - Attributes: - url (str): - The url, in the format ``gs:///``. Trailing - wildcard in the path is allowed. - """ - - url = proto.Field( - proto.STRING, - number=1, - ) - - -class CloudStoragePath(proto.Message): - r"""Message representing a single file or path in Cloud Storage. - Attributes: - path (str): - A url representing a file or path (no wildcards) in Cloud - Storage. Example: gs://[BUCKET_NAME]/dictionary.txt - """ - - path = proto.Field( - proto.STRING, - number=1, - ) - - -class BigQueryOptions(proto.Message): - r"""Options defining BigQuery table and row identifiers. - Attributes: - table_reference (google.cloud.dlp_v2.types.BigQueryTable): - Complete BigQuery table reference. - identifying_fields (Sequence[google.cloud.dlp_v2.types.FieldId]): - Table fields that may uniquely identify a row within the - table. When ``actions.saveFindings.outputConfig.table`` is - specified, the values of columns specified here are - available in the output table under - ``location.content_locations.record_location.record_key.id_values``. - Nested fields such as ``person.birthdate.year`` are allowed. - rows_limit (int): - Max number of rows to scan. If the table has more rows than - this value, the rest of the rows are omitted. If not set, or - if set to 0, all rows will be scanned. Only one of - rows_limit and rows_limit_percent can be specified. Cannot - be used in conjunction with TimespanConfig. - rows_limit_percent (int): - Max percentage of rows to scan. The rest are omitted. The - number of rows scanned is rounded down. Must be between 0 - and 100, inclusively. Both 0 and 100 means no limit. - Defaults to 0. Only one of rows_limit and rows_limit_percent - can be specified. Cannot be used in conjunction with - TimespanConfig. - sample_method (google.cloud.dlp_v2.types.BigQueryOptions.SampleMethod): - - excluded_fields (Sequence[google.cloud.dlp_v2.types.FieldId]): - References to fields excluded from scanning. - This allows you to skip inspection of entire - columns which you know have no findings. - """ - class SampleMethod(proto.Enum): - r"""How to sample rows if not all rows are scanned. Meaningful only when - used in conjunction with either rows_limit or rows_limit_percent. If - not specified, rows are scanned in the order BigQuery reads them. 
- """ - SAMPLE_METHOD_UNSPECIFIED = 0 - TOP = 1 - RANDOM_START = 2 - - table_reference = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - identifying_fields = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='FieldId', - ) - rows_limit = proto.Field( - proto.INT64, - number=3, - ) - rows_limit_percent = proto.Field( - proto.INT32, - number=6, - ) - sample_method = proto.Field( - proto.ENUM, - number=4, - enum=SampleMethod, - ) - excluded_fields = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='FieldId', - ) - - -class StorageConfig(proto.Message): - r"""Shared message indicating Cloud storage type. - Attributes: - datastore_options (google.cloud.dlp_v2.types.DatastoreOptions): - Google Cloud Datastore options. - cloud_storage_options (google.cloud.dlp_v2.types.CloudStorageOptions): - Google Cloud Storage options. - big_query_options (google.cloud.dlp_v2.types.BigQueryOptions): - BigQuery options. - hybrid_options (google.cloud.dlp_v2.types.HybridOptions): - Hybrid inspection options. - Early access feature is in a pre-release state - and might change or have limited support. For - more information, see - https://cloud.google.com/products#product- - launch-stages. - timespan_config (google.cloud.dlp_v2.types.StorageConfig.TimespanConfig): - - """ - - class TimespanConfig(proto.Message): - r"""Configuration of the timespan of the items to include in - scanning. Currently only supported when inspecting Google Cloud - Storage and BigQuery. - - Attributes: - start_time (google.protobuf.timestamp_pb2.Timestamp): - Exclude files, tables, or rows older than - this value. If not set, no lower time limit is - applied. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Exclude files, tables, or rows newer than - this value. If not set, no upper time limit is - applied. - timestamp_field (google.cloud.dlp_v2.types.FieldId): - Specification of the field containing the timestamp of - scanned items. Used for data sources like Datastore and - BigQuery. - - For BigQuery: If this value is not specified and the table - was modified between the given start and end times, the - entire table will be scanned. If this value is specified, - then rows are filtered based on the given start and end - times. Rows with a ``NULL`` value in the provided BigQuery - column are skipped. Valid data types of the provided - BigQuery column are: ``INTEGER``, ``DATE``, ``TIMESTAMP``, - and ``DATETIME``. - - For Datastore: If this value is specified, then entities are - filtered based on the given start and end times. If an - entity does not contain the provided timestamp property or - contains empty or invalid values, then it is included. Valid - data types of the provided timestamp property are: - ``TIMESTAMP``. - enable_auto_population_of_timespan_config (bool): - When the job is started by a JobTrigger we will - automatically figure out a valid start_time to avoid - scanning files that have not been modified since the last - time the JobTrigger executed. This will be based on the time - of the execution of the last run of the JobTrigger. 
- """ - - start_time = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - end_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - timestamp_field = proto.Field( - proto.MESSAGE, - number=3, - message='FieldId', - ) - enable_auto_population_of_timespan_config = proto.Field( - proto.BOOL, - number=4, - ) - - datastore_options = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='DatastoreOptions', - ) - cloud_storage_options = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='CloudStorageOptions', - ) - big_query_options = proto.Field( - proto.MESSAGE, - number=4, - oneof='type', - message='BigQueryOptions', - ) - hybrid_options = proto.Field( - proto.MESSAGE, - number=9, - oneof='type', - message='HybridOptions', - ) - timespan_config = proto.Field( - proto.MESSAGE, - number=6, - message=TimespanConfig, - ) - - -class HybridOptions(proto.Message): - r"""Configuration to control jobs where the content being - inspected is outside of Google Cloud Platform. - - Attributes: - description (str): - A short description of where the data is - coming from. Will be stored once in the job. 256 - max length. - required_finding_label_keys (Sequence[str]): - These are labels that each inspection request must include - within their 'finding_labels' map. Request may contain - others, but any missing one of these will be rejected. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - No more than 10 keys can be required. - labels (Sequence[google.cloud.dlp_v2.types.HybridOptions.LabelsEntry]): - To organize findings, these labels will be added to each - finding. - - Label keys must be between 1 and 63 characters long and must - conform to the following regular expression: - ``[a-z]([-a-z0-9]*[a-z0-9])?``. - - Label values must be between 0 and 63 characters long and - must conform to the regular expression - ``([a-z]([-a-z0-9]*[a-z0-9])?)?``. - - No more than 10 labels can be associated with a given - finding. - - Examples: - - - ``"environment" : "production"`` - - ``"pipeline" : "etl"`` - table_options (google.cloud.dlp_v2.types.TableOptions): - If the container is a table, additional - information to make findings meaningful such as - the columns that are primary keys. - """ - - description = proto.Field( - proto.STRING, - number=1, - ) - required_finding_label_keys = proto.RepeatedField( - proto.STRING, - number=2, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - table_options = proto.Field( - proto.MESSAGE, - number=4, - message='TableOptions', - ) - - -class BigQueryKey(proto.Message): - r"""Row key for identifying a record in BigQuery table. - Attributes: - table_reference (google.cloud.dlp_v2.types.BigQueryTable): - Complete BigQuery table reference. - row_number (int): - Row number inferred at the time the table was scanned. This - value is nondeterministic, cannot be queried, and may be - null for inspection jobs. To locate findings within a table, - specify - ``inspect_job.storage_config.big_query_options.identifying_fields`` - in ``CreateDlpJobRequest``. - """ - - table_reference = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - row_number = proto.Field( - proto.INT64, - number=2, - ) - - -class DatastoreKey(proto.Message): - r"""Record key for a finding in Cloud Datastore. 
- Attributes: - entity_key (google.cloud.dlp_v2.types.Key): - Datastore entity key. - """ - - entity_key = proto.Field( - proto.MESSAGE, - number=1, - message='Key', - ) - - -class Key(proto.Message): - r"""A unique identifier for a Datastore entity. - If a key's partition ID or any of its path kinds or names are - reserved/read-only, the key is reserved/read-only. - A reserved/read-only key is forbidden in certain documented - contexts. - - Attributes: - partition_id (google.cloud.dlp_v2.types.PartitionId): - Entities are partitioned into subsets, - currently identified by a project ID and - namespace ID. Queries are scoped to a single - partition. - path (Sequence[google.cloud.dlp_v2.types.Key.PathElement]): - The entity path. An entity path consists of one or more - elements composed of a kind and a string or numerical - identifier, which identify entities. The first element - identifies a *root entity*, the second element identifies a - *child* of the root entity, the third element identifies a - child of the second entity, and so forth. The entities - identified by all prefixes of the path are called the - element's *ancestors*. - - A path can never be empty, and a path can have at most 100 - elements. - """ - - class PathElement(proto.Message): - r"""A (kind, ID/name) pair used to construct a key path. - If either name or ID is set, the element is complete. If neither - is set, the element is incomplete. - - Attributes: - kind (str): - The kind of the entity. A kind matching regex ``__.*__`` is - reserved/read-only. A kind must not contain more than 1500 - bytes when UTF-8 encoded. Cannot be ``""``. - id (int): - The auto-allocated ID of the entity. - Never equal to zero. Values less than zero are - discouraged and may not be supported in the - future. - name (str): - The name of the entity. A name matching regex ``__.*__`` is - reserved/read-only. A name must not be more than 1500 bytes - when UTF-8 encoded. Cannot be ``""``. - """ - - kind = proto.Field( - proto.STRING, - number=1, - ) - id = proto.Field( - proto.INT64, - number=2, - oneof='id_type', - ) - name = proto.Field( - proto.STRING, - number=3, - oneof='id_type', - ) - - partition_id = proto.Field( - proto.MESSAGE, - number=1, - message='PartitionId', - ) - path = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=PathElement, - ) - - -class RecordKey(proto.Message): - r"""Message for a unique key indicating a record that contains a - finding. - - Attributes: - datastore_key (google.cloud.dlp_v2.types.DatastoreKey): - - big_query_key (google.cloud.dlp_v2.types.BigQueryKey): - - id_values (Sequence[str]): - Values of identifying columns in the given row. Order of - values matches the order of ``identifying_fields`` specified - in the scanning request. - """ - - datastore_key = proto.Field( - proto.MESSAGE, - number=2, - oneof='type', - message='DatastoreKey', - ) - big_query_key = proto.Field( - proto.MESSAGE, - number=3, - oneof='type', - message='BigQueryKey', - ) - id_values = proto.RepeatedField( - proto.STRING, - number=5, - ) - - -class BigQueryTable(proto.Message): - r"""Message defining the location of a BigQuery table. A table is - uniquely identified by its project_id, dataset_id, and table_id. - Within a query a table is often referenced with a string in the - format of: ``<project_id>:<dataset_id>.<table_id>`` or - ``<project_id>.<dataset_id>.<table_id>``. - - Attributes: - project_id (str): - The Google Cloud Platform project ID of the - project containing the table. If omitted, - project ID is inferred from the API call.
- dataset_id (str): - Dataset ID of the table. - table_id (str): - Name of the table. - """ - - project_id = proto.Field( - proto.STRING, - number=1, - ) - dataset_id = proto.Field( - proto.STRING, - number=2, - ) - table_id = proto.Field( - proto.STRING, - number=3, - ) - - -class BigQueryField(proto.Message): - r"""Message defining a field of a BigQuery table. - Attributes: - table (google.cloud.dlp_v2.types.BigQueryTable): - Source table of the field. - field (google.cloud.dlp_v2.types.FieldId): - Designated field in the BigQuery table. - """ - - table = proto.Field( - proto.MESSAGE, - number=1, - message='BigQueryTable', - ) - field = proto.Field( - proto.MESSAGE, - number=2, - message='FieldId', - ) - - -class EntityId(proto.Message): - r"""An entity in a dataset is a field or set of fields that correspond - to a single person. For example, in medical records the ``EntityId`` - might be a patient identifier, or for financial records it might be - an account identifier. This message is used when generalizations or - analysis must take into account that multiple rows correspond to the - same entity. - - Attributes: - field (google.cloud.dlp_v2.types.FieldId): - Composite key indicating which field contains - the entity identifier. - """ - - field = proto.Field( - proto.MESSAGE, - number=1, - message='FieldId', - ) - - -class TableOptions(proto.Message): - r"""Instructions regarding the table content being inspected. - Attributes: - identifying_fields (Sequence[google.cloud.dlp_v2.types.FieldId]): - The columns that are the primary keys for - table objects included in ContentItem. A copy of - this cell's value will be stored alongside - each finding so that the finding can - be traced to the specific row it came from. No - more than 3 may be provided. - """ - - identifying_fields = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='FieldId', - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v2/mypy.ini b/owl-bot-staging/v2/mypy.ini deleted file mode 100644 index 4505b485..00000000 --- a/owl-bot-staging/v2/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.6 -namespace_packages = True diff --git a/owl-bot-staging/v2/noxfile.py b/owl-bot-staging/v2/noxfile.py deleted file mode 100644 index f4db75d0..00000000 --- a/owl-bot-staging/v2/noxfile.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License.
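# --- Illustrative sketch: pointing the BigQuery location types defined above
# at a specific column; project/dataset/table/column names are placeholders. ---
from google.cloud import dlp_v2

email_column = dlp_v2.BigQueryField(
    table=dlp_v2.BigQueryTable(
        project_id="my-project",
        dataset_id="analytics",
        table_id="users",
    ),
    field=dlp_v2.FieldId(name="email"),
)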
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - - -nox.options.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds", - # exclude update_lower_bounds from default - "docs", -] - -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/dlp_v2/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python='3.7') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=['3.6', '3.7']) -def mypy(session): - """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python='3.6') -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py b/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py deleted file mode 100644 index 82a5fabe..00000000 --- a/owl-bot-staging/v2/scripts/fixup_dlp_v2_keywords.py +++ /dev/null @@ -1,209 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
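# --- The nox sessions above are driven from the command line; typical usage,
# assuming nox is installed in the environment (session names taken from the
# decorators above, invocations are illustrative):
#
#   nox -s unit-3.9     # run the unit test suite on Python 3.9
#   nox -s mypy docs    # type-check, then build the docs
# ---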
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class dlpCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'activate_job_trigger': ('name', ), - 'cancel_dlp_job': ('name', ), - 'create_deidentify_template': ('parent', 'deidentify_template', 'template_id', 'location_id', ), - 'create_dlp_job': ('parent', 'inspect_job', 'risk_job', 'job_id', 'location_id', ), - 'create_inspect_template': ('parent', 'inspect_template', 'template_id', 'location_id', ), - 'create_job_trigger': ('parent', 'job_trigger', 'trigger_id', 'location_id', ), - 'create_stored_info_type': ('parent', 'config', 'stored_info_type_id', 'location_id', ), - 'deidentify_content': ('parent', 'deidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'deidentify_template_name', 'location_id', ), - 'delete_deidentify_template': ('name', ), - 'delete_dlp_job': ('name', ), - 'delete_inspect_template': ('name', ), - 'delete_job_trigger': ('name', ), - 'delete_stored_info_type': ('name', ), - 'finish_dlp_job': ('name', ), - 'get_deidentify_template': ('name', ), - 'get_dlp_job': ('name', ), - 'get_inspect_template': ('name', ), - 'get_job_trigger': ('name', ), - 'get_stored_info_type': ('name', ), - 'hybrid_inspect_dlp_job': ('name', 'hybrid_item', ), - 'hybrid_inspect_job_trigger': ('name', 'hybrid_item', ), - 'inspect_content': ('parent', 'inspect_config', 'item', 'inspect_template_name', 'location_id', ), - 'list_deidentify_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'list_dlp_jobs': ('parent', 'filter', 'page_size', 'page_token', 'type_', 'order_by', 'location_id', ), - 'list_info_types': ('parent', 'language_code', 'filter', 'location_id', ), - 'list_inspect_templates': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'list_job_triggers': ('parent', 'page_token', 'page_size', 'order_by', 'filter', 'location_id', ), - 'list_stored_info_types': ('parent', 'page_token', 'page_size', 'order_by', 'location_id', ), - 'redact_image': ('parent', 'location_id', 'inspect_config', 'image_redaction_configs', 'include_findings', 'byte_item', ), - 'reidentify_content': ('parent', 'reidentify_config', 'inspect_config', 'item', 'inspect_template_name', 'reidentify_template_name', 'location_id', ), - 'update_deidentify_template': ('name', 'deidentify_template', 'update_mask', ), - 'update_inspect_template': ('name', 'inspect_template', 'update_mask', ), - 'update_job_trigger': ('name', 'job_trigger', 'update_mask', ), - 'update_stored_info_type': ('name', 'config', 'update_mask', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. 
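# --- Quick illustration of the partition() helper defined above: the first
# returned list holds items where the predicate is true, the second the rest. ---
evens, odds = partition(lambda n: n % 2 == 0, [1, 2, 3, 4])
assert evens == [2, 4] and odds == [1, 3]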
- # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=dlpCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the dlp client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. 
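# --- What the transformer above effects, shown on a hypothetical call site
# (method and argument values are placeholders):
#
#   before: client.get_dlp_job('projects/p/dlpJobs/j', timeout=30)
#   after:  client.get_dlp_job(request={'name': 'projects/p/dlpJobs/j'}, timeout=30)
#
# Positional and flattened keyword arguments are folded into a single
# `request` dict keyed by METHOD_TO_PARAMS, while the control parameters
# (retry/timeout/metadata) are preserved as ordinary keywords. ---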
-""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v2/setup.py b/owl-bot-staging/v2/setup.py deleted file mode 100644 index 2beca4f9..00000000 --- a/owl-bot-staging/v2/setup.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import setuptools # type: ignore - -version = '0.1.0' - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -setuptools.setup( - name='google-cloud-dlp', - version=version, - long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', - 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', ), - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - zip_safe=False, -) diff --git a/owl-bot-staging/v2/tests/__init__.py b/owl-bot-staging/v2/tests/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/__init__.py b/owl-bot-staging/v2/tests/unit/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py deleted file mode 100644 index b54a5fcc..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py b/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py deleted file mode 100644 index c8e0b643..00000000 --- a/owl-bot-staging/v2/tests/unit/gapic/dlp_v2/test_dlp_service.py +++ /dev/null @@ -1,9393 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.dlp_v2.services.dlp_service import DlpServiceAsyncClient -from google.cloud.dlp_v2.services.dlp_service import DlpServiceClient -from google.cloud.dlp_v2.services.dlp_service import pagers -from google.cloud.dlp_v2.services.dlp_service import transports -from google.cloud.dlp_v2.services.dlp_service.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.dlp_v2.types import dlp -from google.cloud.dlp_v2.types import storage -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import field_mask_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from google.type import date_pb2 # type: ignore -from google.type import dayofweek_pb2 # type: ignore -from google.type import timeofday_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert DlpServiceClient._get_default_mtls_endpoint(None) is None - assert DlpServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert DlpServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - DlpServiceClient, - DlpServiceAsyncClient, -]) -def test_dlp_service_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'dlp.googleapis.com:443' - - -@pytest.mark.parametrize("client_class", [ - DlpServiceClient, - DlpServiceAsyncClient, -]) -def test_dlp_service_client_service_account_always_use_jwt(client_class): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - client = client_class(credentials=creds) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.DlpServiceGrpcTransport, "grpc"), - (transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_dlp_service_client_service_account_always_use_jwt_true(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - -@pytest.mark.parametrize("client_class", [ - DlpServiceClient, - DlpServiceAsyncClient, -]) -def test_dlp_service_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'dlp.googleapis.com:443' - - -def test_dlp_service_client_get_transport_class(): - transport = DlpServiceClient.get_transport_class() - available_transports = [ - transports.DlpServiceGrpcTransport, - ] - assert transport in available_transports - - transport = DlpServiceClient.get_transport_class("grpc") - assert transport == 
transports.DlpServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) -def test_dlp_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(DlpServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "true"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc", "false"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(DlpServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceClient)) -@mock.patch.object(DlpServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(DlpServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_dlp_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_dlp_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (DlpServiceClient, transports.DlpServiceGrpcTransport, "grpc"), - (DlpServiceAsyncClient, transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_dlp_service_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
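# --- Summary of the endpoint selection exercised by the tests above:
#   GOOGLE_API_USE_MTLS_ENDPOINT=never  -> DEFAULT_ENDPOINT
#   GOOGLE_API_USE_MTLS_ENDPOINT=always -> DEFAULT_MTLS_ENDPOINT
#   GOOGLE_API_USE_MTLS_ENDPOINT=auto   -> mTLS endpoint only when a client
#       certificate is available and GOOGLE_API_USE_CLIENT_CERTIFICATE="true"
#   any other value                     -> MutualTLSChannelError ---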
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - -def test_dlp_service_client_client_options_from_dict(): - with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = DlpServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - ) - - -def test_inspect_content(transport: str = 'grpc', request_type=dlp.InspectContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectContentResponse( - ) - response = client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -def test_inspect_content_from_dict(): - test_inspect_content(request_type=dict) - - -def test_inspect_content_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - client.inspect_content() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() - - -@pytest.mark.asyncio -async def test_inspect_content_async(transport: str = 'grpc_asyncio', request_type=dlp.InspectContentRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - # Designate an appropriate return value for the call. 
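# --- For contrast with the mocked stubs in these tests, a real inspect_content
# call would look roughly like this; project and payload values are
# hypothetical, and the request fields mirror InspectContentRequest:
#
#   client = DlpServiceClient()
#   response = client.inspect_content(
#       request={
#           'parent': 'projects/my-project',
#           'inspect_config': {'info_types': [{'name': 'EMAIL_ADDRESS'}]},
#           'item': {'value': 'jane@example.com'},
#       },
#   )
# ---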
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse( - )) - response = await client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.InspectContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectContentResponse) - - -@pytest.mark.asyncio -async def test_inspect_content_async_from_dict(): - await test_inspect_content_async(request_type=dict) - - -def test_inspect_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.InspectContentRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - call.return_value = dlp.InspectContentResponse() - client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_inspect_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.InspectContentRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.inspect_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectContentResponse()) - await client.inspect_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_redact_image(transport: str = 'grpc', request_type=dlp.RedactImageRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - ) - response = client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -def test_redact_image_from_dict(): - test_redact_image(request_type=dict) - - -def test_redact_image_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - client.redact_image() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() - - -@pytest.mark.asyncio -async def test_redact_image_async(transport: str = 'grpc_asyncio', request_type=dlp.RedactImageRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse( - redacted_image=b'redacted_image_blob', - extracted_text='extracted_text_value', - )) - response = await client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.RedactImageRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.RedactImageResponse) - assert response.redacted_image == b'redacted_image_blob' - assert response.extracted_text == 'extracted_text_value' - - -@pytest.mark.asyncio -async def test_redact_image_async_from_dict(): - await test_redact_image_async(request_type=dict) - - -def test_redact_image_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.RedactImageRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - call.return_value = dlp.RedactImageResponse() - client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_redact_image_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.RedactImageRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.redact_image), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.RedactImageResponse()) - await client.redact_image(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_deidentify_content(transport: str = 'grpc', request_type=dlp.DeidentifyContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyContentResponse( - ) - response = client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyContentResponse) - - -def test_deidentify_content_from_dict(): - test_deidentify_content(request_type=dict) - - -def test_deidentify_content_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - client.deidentify_content() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() - - -@pytest.mark.asyncio -async def test_deidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.DeidentifyContentRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse( - )) - response = await client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeidentifyContentRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyContentResponse) - - -@pytest.mark.asyncio -async def test_deidentify_content_async_from_dict(): - await test_deidentify_content_async(request_type=dict) - - -def test_deidentify_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeidentifyContentRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - call.return_value = dlp.DeidentifyContentResponse() - client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_deidentify_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeidentifyContentRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.deidentify_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyContentResponse()) - await client.deidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_reidentify_content(transport: str = 'grpc', request_type=dlp.ReidentifyContentRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ReidentifyContentResponse( - ) - response = client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) - - -def test_reidentify_content_from_dict(): - test_reidentify_content(request_type=dict) - - -def test_reidentify_content_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - client.reidentify_content() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() - - -@pytest.mark.asyncio -async def test_reidentify_content_async(transport: str = 'grpc_asyncio', request_type=dlp.ReidentifyContentRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse( - )) - response = await client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ReidentifyContentRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ReidentifyContentResponse) - - -@pytest.mark.asyncio -async def test_reidentify_content_async_from_dict(): - await test_reidentify_content_async(request_type=dict) - - -def test_reidentify_content_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ReidentifyContentRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - call.return_value = dlp.ReidentifyContentResponse() - client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_reidentify_content_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ReidentifyContentRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.reidentify_content), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ReidentifyContentResponse()) - await client.reidentify_content(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_info_types(transport: str = 'grpc', request_type=dlp.ListInfoTypesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse( - ) - response = client.list_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ListInfoTypesResponse) - - -def test_list_info_types_from_dict(): - test_list_info_types(request_type=dict) - - -def test_list_info_types_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - client.list_info_types() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() - - -@pytest.mark.asyncio -async def test_list_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInfoTypesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse( - )) - response = await client.list_info_types(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInfoTypesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.ListInfoTypesResponse) - - -@pytest.mark.asyncio -async def test_list_info_types_async_from_dict(): - await test_list_info_types_async(request_type=dict) - - -def test_list_info_types_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_info_types_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_info_types_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_info_types), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInfoTypesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInfoTypesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_info_types( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_info_types_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_info_types( - dlp.ListInfoTypesRequest(), - parent='parent_value', - ) - - -def test_create_inspect_template(transport: str = 'grpc', request_type=dlp.CreateInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_inspect_template_from_dict(): - test_create_inspect_template(request_type=dict) - - -def test_create_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
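The *_flattened_error tests pin down a surface contract of the generated client: a method accepts either a request object or flattened keyword arguments, never both. The check fires client-side before any transport work, so no mocking is needed to observe it; a sketch (editorial, not part of the patch):

from google.auth import credentials as ga_credentials
from google.cloud import dlp_v2
from google.cloud.dlp_v2.types import dlp

client = dlp_v2.DlpServiceClient(
    credentials=ga_credentials.AnonymousCredentials())

try:
    # Supplying a request object AND the flattened parent is rejected
    # with ValueError before anything touches the transport.
    client.list_info_types(dlp.ListInfoTypesRequest(), parent='parent_value')
except ValueError as exc:
    print('rejected as expected:', exc)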
- client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - client.create_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() - - -@pytest.mark.asyncio -async def test_create_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_inspect_template_async_from_dict(): - await test_create_inspect_template_async(request_type=dict) - - -def test_create_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateInspectTemplateRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateInspectTemplateRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.create_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_inspect_template( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].inspect_template == dlp.InspectTemplate(name='name_value') - - -def test_create_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_inspect_template( - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].inspect_template == dlp.InspectTemplate(name='name_value') - - -@pytest.mark.asyncio -async def test_create_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
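The *_field_headers tests assert on the x-goog-request-params entry that the client injects into call metadata so the backend can route on the URI-bound field. A sketch of how that metadata is recovered from the mock (editorial; field values illustrative):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import dlp_v2
from google.cloud.dlp_v2.types import dlp

client = dlp_v2.DlpServiceClient(
    credentials=ga_credentials.AnonymousCredentials())

request = dlp.CreateInspectTemplateRequest()
request.parent = 'parent/value'

with mock.patch.object(
        type(client.transport.create_inspect_template), '__call__') as call:
    call.return_value = dlp.InspectTemplate()
    client.create_inspect_template(request)

# The kwargs of the intercepted call carry the metadata tuple; the routing
# header sits alongside the x-goog-api-client header.
_, _, kw = call.mock_calls[0]
assert ('x-goog-request-params', 'parent=parent/value') in kw['metadata']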
- with pytest.raises(ValueError): - await client.create_inspect_template( - dlp.CreateInspectTemplateRequest(), - parent='parent_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - ) - - -def test_update_inspect_template(transport: str = 'grpc', request_type=dlp.UpdateInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_inspect_template_from_dict(): - test_update_inspect_template(request_type=dict) - - -def test_update_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - client.update_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() - - -@pytest.mark.asyncio -async def test_update_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateInspectTemplateRequest() - - # Establish that the response is the type that we expect. 
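Each *_empty_call failsafe verifies that invoking a method with no request and no flattened fields still builds an all-default request instead of forwarding None. The equivalent in isolation (editorial sketch):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import dlp_v2
from google.cloud.dlp_v2.types import dlp

client = dlp_v2.DlpServiceClient(
    credentials=ga_credentials.AnonymousCredentials())

with mock.patch.object(
        type(client.transport.update_inspect_template), '__call__') as call:
    call.return_value = dlp.InspectTemplate()
    client.update_inspect_template()  # request=None, no flattened fields

# The client substituted a default-constructed request object.
_, args, _ = call.mock_calls[0]
assert args[0] == dlp.UpdateInspectTemplateRequest()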
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_inspect_template_async_from_dict(): - await test_update_inspect_template_async(request_type=dict) - - -def test_update_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateInspectTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateInspectTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.update_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_update_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_inspect_template( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].inspect_template == dlp.InspectTemplate(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -def test_update_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.asyncio -async def test_update_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_inspect_template( - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].inspect_template == dlp.InspectTemplate(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -@pytest.mark.asyncio -async def test_update_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_inspect_template( - dlp.UpdateInspectTemplateRequest(), - name='name_value', - inspect_template=dlp.InspectTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_get_inspect_template(transport: str = 'grpc', request_type=dlp.GetInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() - - # Establish that the response is the type that we expect. 
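The update_* tests thread a google.protobuf FieldMask through the flattened update_mask parameter; the mask names which template fields the server should modify, and in the tests it is only compared structurally since the call never leaves the mock. A small sketch of building such a request (editorial; the resource name below is hypothetical):

from google.protobuf import field_mask_pb2

from google.cloud.dlp_v2.types import dlp

# Hypothetical template name, for illustration only.
request = dlp.UpdateInspectTemplateRequest(
    name='organizations/123/inspectTemplates/456',
    inspect_template=dlp.InspectTemplate(display_name='tightened rules'),
    update_mask=field_mask_pb2.FieldMask(paths=['display_name']),
)
assert request.update_mask.paths == ['display_name']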
- assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_inspect_template_from_dict(): - test_get_inspect_template(request_type=dict) - - -def test_get_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - client.get_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() - - -@pytest.mark.asyncio -async def test_get_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.InspectTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_inspect_template_async_from_dict(): - await test_get_inspect_template_async(request_type=dict) - - -def test_get_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetInspectTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - call.return_value = dlp.InspectTemplate() - client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = dlp.GetInspectTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - await client.get_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.InspectTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.InspectTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_inspect_template( - dlp.GetInspectTemplateRequest(), - name='name_value', - ) - - -def test_list_inspect_templates(transport: str = 'grpc', request_type=dlp.ListInspectTemplatesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInspectTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_inspect_templates_from_dict(): - test_list_inspect_templates(request_type=dict) - - -def test_list_inspect_templates_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - client.list_inspect_templates() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() - - -@pytest.mark.asyncio -async def test_list_inspect_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListInspectTemplatesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListInspectTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListInspectTemplatesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_inspect_templates_async_from_dict(): - await test_list_inspect_templates_async(request_type=dict) - - -def test_list_inspect_templates_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListInspectTemplatesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - call.return_value = dlp.ListInspectTemplatesResponse() - client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_inspect_templates_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListInspectTemplatesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) - await client.list_inspect_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_inspect_templates_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInspectTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_inspect_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_inspect_templates_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_inspect_templates_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListInspectTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListInspectTemplatesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_inspect_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_inspect_templates_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_inspect_templates( - dlp.ListInspectTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_inspect_templates_pager(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Set the response to a series of pages.
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_inspect_templates(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in results) -
-def test_list_inspect_templates_pages(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - pages = list(client.list_inspect_templates(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token -
-@pytest.mark.asyncio -async def test_list_inspect_templates_async_pager(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages.
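Context for the pager tests above (the async variants continue below): list_inspect_templates returns a pager, and each exhausted page triggers another call to the mocked stub, consuming the next side_effect entry until a response arrives with an empty next_page_token. A compact two-page sketch (editorial, not part of the patch):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import dlp_v2
from google.cloud.dlp_v2.types import dlp

client = dlp_v2.DlpServiceClient(
    credentials=ga_credentials.AnonymousCredentials())

with mock.patch.object(
        type(client.transport.list_inspect_templates), '__call__') as call:
    call.side_effect = (
        dlp.ListInspectTemplatesResponse(
            inspect_templates=[dlp.InspectTemplate()],
            next_page_token='abc'),
        dlp.ListInspectTemplatesResponse(
            inspect_templates=[dlp.InspectTemplate()]),  # no token: last page
    )
    # Iterating the pager transparently fetches the second page.
    results = list(client.list_inspect_templates(request={}))

assert len(results) == 2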
- call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_inspect_templates(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, dlp.InspectTemplate) - for i in responses) -
-@pytest.mark.asyncio -async def test_list_inspect_templates_async_pages(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_inspect_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - next_page_token='abc', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[], - next_page_token='def', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListInspectTemplatesResponse( - inspect_templates=[ - dlp.InspectTemplate(), - dlp.InspectTemplate(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_inspect_templates(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token -
-def test_delete_inspect_template(transport: str = 'grpc', request_type=dlp.DeleteInspectTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_inspect_template_from_dict(): - test_delete_inspect_template(request_type=dict) - - -def test_delete_inspect_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request.
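The async pager counterpart needs new_callable=mock.AsyncMock so the side_effect values come back awaitable, and the pager itself is obtained by awaiting the method. A sketch (editorial, not part of the patch):

import asyncio
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import dlp_v2
from google.cloud.dlp_v2.types import dlp


async def main():
    client = dlp_v2.DlpServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials())
    with mock.patch.object(
            type(client.transport.list_inspect_templates), '__call__',
            new_callable=mock.AsyncMock) as call:
        call.side_effect = (
            dlp.ListInspectTemplatesResponse(
                inspect_templates=[dlp.InspectTemplate()],
                next_page_token='abc'),
            dlp.ListInspectTemplatesResponse(
                inspect_templates=[dlp.InspectTemplate()]),
        )
        pager = await client.list_inspect_templates(request={})
        items = [item async for item in pager]  # walks both pages
    assert len(items) == 2


asyncio.run(main())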
- with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - client.delete_inspect_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() - - -@pytest.mark.asyncio -async def test_delete_inspect_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteInspectTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteInspectTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_inspect_template_async_from_dict(): - await test_delete_inspect_template_async(request_type=dict) - - -def test_delete_inspect_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteInspectTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - call.return_value = None - client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_inspect_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteInspectTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_inspect_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_inspect_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
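A note on the delete_* assertions: RPCs declared to return google.protobuf.Empty surface as None in the generated Python client, so the canned return value and the expected response are both literally None. In isolation (editorial sketch):

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud import dlp_v2
from google.cloud.dlp_v2.types import dlp

client = dlp_v2.DlpServiceClient(
    credentials=ga_credentials.AnonymousCredentials())

with mock.patch.object(
        type(client.transport.delete_inspect_template), '__call__') as call:
    call.return_value = None  # Empty maps to None in Python
    response = client.delete_inspect_template(
        request=dlp.DeleteInspectTemplateRequest())

assert response is None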
- with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_inspect_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_inspect_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_inspect_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_inspect_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_inspect_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_inspect_template( - dlp.DeleteInspectTemplateRequest(), - name='name_value', - ) - - -def test_create_deidentify_template(transport: str = 'grpc', request_type=dlp.CreateDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_create_deidentify_template_from_dict(): - test_create_deidentify_template(request_type=dict) - - -def test_create_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - client.create_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() - - -@pytest.mark.asyncio -async def test_create_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_create_deidentify_template_async_from_dict(): - await test_create_deidentify_template_async(request_type=dict) - - -def test_create_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDeidentifyTemplateRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDeidentifyTemplateRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.create_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_deidentify_template( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].deidentify_template == dlp.DeidentifyTemplate(name='name_value') - - -def test_create_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_deidentify_template( - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].deidentify_template == dlp.DeidentifyTemplate(name='name_value') - - -@pytest.mark.asyncio -async def test_create_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_deidentify_template( - dlp.CreateDeidentifyTemplateRequest(), - parent='parent_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - ) - - -def test_update_deidentify_template(transport: str = 'grpc', request_type=dlp.UpdateDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_update_deidentify_template_from_dict(): - test_update_deidentify_template(request_type=dict) - - -def test_update_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - client.update_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() - - -@pytest.mark.asyncio -async def test_update_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_update_deidentify_template_async_from_dict(): - await test_update_deidentify_template_async(request_type=dict) - - -def test_update_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateDeidentifyTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateDeidentifyTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.update_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_update_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.update_deidentify_template( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].deidentify_template == dlp.DeidentifyTemplate(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -def test_update_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.asyncio -async def test_update_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_deidentify_template( - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].deidentify_template == dlp.DeidentifyTemplate(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -@pytest.mark.asyncio -async def test_update_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_deidentify_template( - dlp.UpdateDeidentifyTemplateRequest(), - name='name_value', - deidentify_template=dlp.DeidentifyTemplate(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_get_deidentify_template(transport: str = 'grpc', request_type=dlp.GetDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - ) - response = client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -def test_get_deidentify_template_from_dict(): - test_get_deidentify_template(request_type=dict) - - -def test_get_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - client.get_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() - - -@pytest.mark.asyncio -async def test_get_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate( - name='name_value', - display_name='display_name_value', - description='description_value', - )) - response = await client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DeidentifyTemplate) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - - -@pytest.mark.asyncio -async def test_get_deidentify_template_async_from_dict(): - await test_get_deidentify_template_async(request_type=dict) - - -def test_get_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDeidentifyTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - call.return_value = dlp.DeidentifyTemplate() - client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. 
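- # Each mock_calls entry is a (name, args, kwargs) triple; args[0] is
- # the request proto that was handed to the stub.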
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDeidentifyTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - await client.get_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DeidentifyTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DeidentifyTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_deidentify_template( - dlp.GetDeidentifyTemplateRequest(), - name='name_value', - ) - - -def test_list_deidentify_templates(transport: str = 'grpc', request_type=dlp.ListDeidentifyTemplatesRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeidentifyTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_deidentify_templates_from_dict(): - test_list_deidentify_templates(request_type=dict) - - -def test_list_deidentify_templates_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - client.list_deidentify_templates() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDeidentifyTemplatesRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDeidentifyTemplatesRequest() - - # Establish that the response is the type that we expect. 
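- # The async surface wraps the raw response in an AsyncPager; real
- # usage would iterate it (illustrative only):
- #     pager = await client.list_deidentify_templates(request={})
- #     async for template in pager:
- #         ...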
- assert isinstance(response, pagers.ListDeidentifyTemplatesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_async_from_dict(): - await test_list_deidentify_templates_async(request_type=dict) - - -def test_list_deidentify_templates_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDeidentifyTemplatesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - call.return_value = dlp.ListDeidentifyTemplatesResponse() - client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_deidentify_templates_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDeidentifyTemplatesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse()) - await client.list_deidentify_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_deidentify_templates_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_deidentify_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDeidentifyTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_deidentify_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_deidentify_templates_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError):
- client.list_deidentify_templates(
- dlp.ListDeidentifyTemplatesRequest(),
- parent='parent_value',
- )
-
-
- @pytest.mark.asyncio
- async def test_list_deidentify_templates_flattened_async():
- client = DlpServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_deidentify_templates),
- '__call__') as call:
- # Designate an appropriate return value for the call.
- call.return_value = dlp.ListDeidentifyTemplatesResponse()
-
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDeidentifyTemplatesResponse())
- # Call the method with a truthy value for each flattened field,
- # using the keyword arguments to the method.
- response = await client.list_deidentify_templates(
- parent='parent_value',
- )
-
- # Establish that the underlying call was made with the expected
- # request object values.
- assert len(call.mock_calls)
- _, args, _ = call.mock_calls[0]
- assert args[0].parent == 'parent_value'
-
-
- @pytest.mark.asyncio
- async def test_list_deidentify_templates_flattened_error_async():
- client = DlpServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Attempting to call a method with both a request object and flattened
- # fields is an error.
- with pytest.raises(ValueError):
- await client.list_deidentify_templates(
- dlp.ListDeidentifyTemplatesRequest(),
- parent='parent_value',
- )
-
-
- def test_list_deidentify_templates_pager():
- client = DlpServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_deidentify_templates),
- '__call__') as call:
- # Set the response to a series of pages.
- call.side_effect = (
- dlp.ListDeidentifyTemplatesResponse(
- deidentify_templates=[
- dlp.DeidentifyTemplate(),
- dlp.DeidentifyTemplate(),
- dlp.DeidentifyTemplate(),
- ],
- next_page_token='abc',
- ),
- dlp.ListDeidentifyTemplatesResponse(
- deidentify_templates=[],
- next_page_token='def',
- ),
- dlp.ListDeidentifyTemplatesResponse(
- deidentify_templates=[
- dlp.DeidentifyTemplate(),
- ],
- next_page_token='ghi',
- ),
- dlp.ListDeidentifyTemplatesResponse(
- deidentify_templates=[
- dlp.DeidentifyTemplate(),
- dlp.DeidentifyTemplate(),
- ],
- ),
- RuntimeError,
- )
-
- metadata = ()
- metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((
- ('parent', ''),
- )),
- )
- pager = client.list_deidentify_templates(request={})
-
- assert pager._metadata == metadata
-
- results = [i for i in pager]
- assert len(results) == 6
- assert all(isinstance(i, dlp.DeidentifyTemplate)
- for i in results)
-
- def test_list_deidentify_templates_pages():
- client = DlpServiceClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_deidentify_templates),
- '__call__') as call:
- # Set the response to a series of pages.
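- # Setting side_effect to an iterable makes the mock return the next
- # item on each successive call; the trailing RuntimeError fails fast
- # if the pager ever requests more pages than were provided.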
- call.side_effect = (
- dlp.ListDeidentifyTemplatesResponse(
- deidentify_templates=[
- dlp.DeidentifyTemplate(),
- dlp.DeidentifyTemplate(),
- dlp.DeidentifyTemplate(),
- ],
- next_page_token='abc',
- ),
- dlp.ListDeidentifyTemplatesResponse(
- deidentify_templates=[],
- next_page_token='def',
- ),
- dlp.ListDeidentifyTemplatesResponse(
- deidentify_templates=[
- dlp.DeidentifyTemplate(),
- ],
- next_page_token='ghi',
- ),
- dlp.ListDeidentifyTemplatesResponse(
- deidentify_templates=[
- dlp.DeidentifyTemplate(),
- dlp.DeidentifyTemplate(),
- ],
- ),
- RuntimeError,
- )
- pages = list(client.list_deidentify_templates(request={}).pages)
- for page_, token in zip(pages, ['abc','def','ghi', '']):
- assert page_.raw_page.next_page_token == token
-
- @pytest.mark.asyncio
- async def test_list_deidentify_templates_async_pager():
- client = DlpServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_deidentify_templates),
- '__call__', new_callable=mock.AsyncMock) as call:
- # Set the response to a series of pages.
- call.side_effect = (
- dlp.ListDeidentifyTemplatesResponse(
- deidentify_templates=[
- dlp.DeidentifyTemplate(),
- dlp.DeidentifyTemplate(),
- dlp.DeidentifyTemplate(),
- ],
- next_page_token='abc',
- ),
- dlp.ListDeidentifyTemplatesResponse(
- deidentify_templates=[],
- next_page_token='def',
- ),
- dlp.ListDeidentifyTemplatesResponse(
- deidentify_templates=[
- dlp.DeidentifyTemplate(),
- ],
- next_page_token='ghi',
- ),
- dlp.ListDeidentifyTemplatesResponse(
- deidentify_templates=[
- dlp.DeidentifyTemplate(),
- dlp.DeidentifyTemplate(),
- ],
- ),
- RuntimeError,
- )
- async_pager = await client.list_deidentify_templates(request={},)
- assert async_pager.next_page_token == 'abc'
- responses = []
- async for response in async_pager:
- responses.append(response)
-
- assert len(responses) == 6
- assert all(isinstance(i, dlp.DeidentifyTemplate)
- for i in responses)
-
- @pytest.mark.asyncio
- async def test_list_deidentify_templates_async_pages():
- client = DlpServiceAsyncClient(
- credentials=ga_credentials.AnonymousCredentials(),
- )
-
- # Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client.transport.list_deidentify_templates),
- '__call__', new_callable=mock.AsyncMock) as call:
- # Set the response to a series of pages.
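- # Four pages again: tokens 'abc', 'def', 'ghi', then a final page
- # whose empty next_page_token marks the end of the results.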
- call.side_effect = ( - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - next_page_token='abc', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[], - next_page_token='def', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - ], - next_page_token='ghi', - ), - dlp.ListDeidentifyTemplatesResponse( - deidentify_templates=[ - dlp.DeidentifyTemplate(), - dlp.DeidentifyTemplate(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_deidentify_templates(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_delete_deidentify_template(transport: str = 'grpc', request_type=dlp.DeleteDeidentifyTemplateRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_deidentify_template_from_dict(): - test_delete_deidentify_template(request_type=dict) - - -def test_delete_deidentify_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - client.delete_deidentify_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() - - -@pytest.mark.asyncio -async def test_delete_deidentify_template_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDeidentifyTemplateRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteDeidentifyTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_deidentify_template_async_from_dict(): - await test_delete_deidentify_template_async(request_type=dict) - - -def test_delete_deidentify_template_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDeidentifyTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - call.return_value = None - client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_deidentify_template_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteDeidentifyTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_deidentify_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_deidentify_template_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_deidentify_template_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_deidentify_template_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_deidentify_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_deidentify_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_deidentify_template_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_deidentify_template( - dlp.DeleteDeidentifyTemplateRequest(), - name='name_value', - ) - - -def test_create_job_trigger(transport: str = 'grpc', request_type=dlp.CreateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - response = client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_create_job_trigger_from_dict(): - test_create_job_trigger(request_type=dict) - - -def test_create_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
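- # __call__ is patched on the type because Python resolves special
- # methods on the class, not on the instance.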
- with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - client.create_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() - - -@pytest.mark.asyncio -async def test_create_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_create_job_trigger_async_from_dict(): - await test_create_job_trigger_async(request_type=dict) - - -def test_create_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateJobTriggerRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateJobTriggerRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.create_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
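- # The routing header travels as gRPC metadata: kw['metadata'] holds
- # the ('x-goog-request-params', ...) tuple asserted below.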
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job_trigger( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].job_trigger == dlp.JobTrigger(name='name_value') - - -def test_create_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_job_trigger( - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].job_trigger == dlp.JobTrigger(name='name_value') - - -@pytest.mark.asyncio -async def test_create_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_job_trigger( - dlp.CreateJobTriggerRequest(), - parent='parent_value', - job_trigger=dlp.JobTrigger(name='name_value'), - ) - - -def test_update_job_trigger(transport: str = 'grpc', request_type=dlp.UpdateJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - ) - response = client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_update_job_trigger_from_dict(): - test_update_job_trigger(request_type=dict) - - -def test_update_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - client.update_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() - - -@pytest.mark.asyncio -async def test_update_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.UpdateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_update_job_trigger_async_from_dict(): - await test_update_job_trigger_async(request_type=dict) - - -def test_update_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateJobTriggerRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_update_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.UpdateJobTriggerRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - await client.update_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_update_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.JobTrigger() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.update_job_trigger( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].job_trigger == dlp.JobTrigger(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -def test_update_job_trigger_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -@pytest.mark.asyncio -async def test_update_job_trigger_flattened_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.update_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = dlp.JobTrigger() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.update_job_trigger( - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - assert args[0].job_trigger == dlp.JobTrigger(name='name_value') - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value']) - - -@pytest.mark.asyncio -async def test_update_job_trigger_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.update_job_trigger( - dlp.UpdateJobTriggerRequest(), - name='name_value', - job_trigger=dlp.JobTrigger(name='name_value'), - update_mask=field_mask_pb2.FieldMask(paths=['paths_value']), - ) - - -def test_hybrid_inspect_job_trigger(transport: str = 'grpc', request_type=dlp.HybridInspectJobTriggerRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse( - ) - response = client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -def test_hybrid_inspect_job_trigger_from_dict(): - test_hybrid_inspect_job_trigger(request_type=dict) - - -def test_hybrid_inspect_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - client.hybrid_inspect_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() - - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
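- # proto3 fields all have defaults, so request_type() with no
- # arguments constructs a valid empty request.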
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse( - )) - response = await client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.HybridInspectJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.HybridInspectResponse) - - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_async_from_dict(): - await test_hybrid_inspect_job_trigger_async(request_type=dict) - - -def test_hybrid_inspect_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectJobTriggerRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - call.return_value = dlp.HybridInspectResponse() - client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_hybrid_inspect_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.HybridInspectJobTriggerRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse()) - await client.hybrid_inspect_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_hybrid_inspect_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.hybrid_inspect_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.HybridInspectResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.hybrid_inspect_job_trigger( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-def test_hybrid_inspect_job_trigger_flattened_error():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.hybrid_inspect_job_trigger(
-            dlp.HybridInspectJobTriggerRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_hybrid_inspect_job_trigger_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.hybrid_inspect_job_trigger),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.hybrid_inspect_job_trigger(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_hybrid_inspect_job_trigger_flattened_error_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.hybrid_inspect_job_trigger(
-            dlp.HybridInspectJobTriggerRequest(),
-            name='name_value',
-        )
-
-
-def test_get_job_trigger(transport: str = 'grpc', request_type=dlp.GetJobTriggerRequest):
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job_trigger),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.JobTrigger(
-            name='name_value',
-            display_name='display_name_value',
-            description='description_value',
-            status=dlp.JobTrigger.Status.HEALTHY,
-            inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))),
-        )
-        response = client.get_job_trigger(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.GetJobTriggerRequest()
-
-    # Establish that the response is the type that we expect.
- assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -def test_get_job_trigger_from_dict(): - test_get_job_trigger(request_type=dict) - - -def test_get_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - client.get_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() - - -@pytest.mark.asyncio -async def test_get_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.GetJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger( - name='name_value', - display_name='display_name_value', - description='description_value', - status=dlp.JobTrigger.Status.HEALTHY, - )) - response = await client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.JobTrigger) - assert response.name == 'name_value' - assert response.display_name == 'display_name_value' - assert response.description == 'description_value' - assert response.status == dlp.JobTrigger.Status.HEALTHY - - -@pytest.mark.asyncio -async def test_get_job_trigger_async_from_dict(): - await test_get_job_trigger_async(request_type=dict) - - -def test_get_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetJobTriggerRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_trigger), - '__call__') as call: - call.return_value = dlp.JobTrigger() - client.get_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
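-    # (gRPC metadata is a sequence of (key, value) tuples, which is why a
-    # simple `in` membership check is sufficient here.)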
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_job_trigger_field_headers_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.GetJobTriggerRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job_trigger),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger())
-        await client.get_job_trigger(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_get_job_trigger_flattened():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job_trigger),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.JobTrigger()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_job_trigger(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-def test_get_job_trigger_flattened_error():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_job_trigger(
-            dlp.GetJobTriggerRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_get_job_trigger_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_job_trigger),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.JobTrigger())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_job_trigger(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_get_job_trigger_flattened_error_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
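-    # (Mixing the two calling conventions would make field precedence
-    # ambiguous, so the client raises instead of merging them.)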
- with pytest.raises(ValueError): - await client.get_job_trigger( - dlp.GetJobTriggerRequest(), - name='name_value', - ) - - -def test_list_job_triggers(transport: str = 'grpc', request_type=dlp.ListJobTriggersRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - ) - response = client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_job_triggers_from_dict(): - test_list_job_triggers(request_type=dict) - - -def test_list_job_triggers_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - client.list_job_triggers() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() - - -@pytest.mark.asyncio -async def test_list_job_triggers_async(transport: str = 'grpc_asyncio', request_type=dlp.ListJobTriggersRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_triggers), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_job_triggers(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListJobTriggersRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTriggersAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_job_triggers_async_from_dict(): - await test_list_job_triggers_async(request_type=dict) - - -def test_list_job_triggers_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
-    request = dlp.ListJobTriggersRequest()
-
-    request.parent = 'parent/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_job_triggers),
-            '__call__') as call:
-        call.return_value = dlp.ListJobTriggersResponse()
-        client.list_job_triggers(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_job_triggers_field_headers_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.ListJobTriggersRequest()
-
-    request.parent = 'parent/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_job_triggers),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse())
-        await client.list_job_triggers(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-def test_list_job_triggers_flattened():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_job_triggers),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.ListJobTriggersResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_job_triggers(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-
-
-def test_list_job_triggers_flattened_error():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_job_triggers(
-            dlp.ListJobTriggersRequest(),
-            parent='parent_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_list_job_triggers_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_job_triggers),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListJobTriggersResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_job_triggers(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-
-
-@pytest.mark.asyncio
-async def test_list_job_triggers_flattened_error_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_job_triggers(
-            dlp.ListJobTriggersRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_job_triggers_pager():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_job_triggers),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dlp.ListJobTriggersResponse(
-                job_triggers=[
-                    dlp.JobTrigger(),
-                    dlp.JobTrigger(),
-                    dlp.JobTrigger(),
-                ],
-                next_page_token='abc',
-            ),
-            dlp.ListJobTriggersResponse(
-                job_triggers=[],
-                next_page_token='def',
-            ),
-            dlp.ListJobTriggersResponse(
-                job_triggers=[
-                    dlp.JobTrigger(),
-                ],
-                next_page_token='ghi',
-            ),
-            dlp.ListJobTriggersResponse(
-                job_triggers=[
-                    dlp.JobTrigger(),
-                    dlp.JobTrigger(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        metadata = ()
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_job_triggers(request={})
-
-        assert pager._metadata == metadata
-
-        results = [i for i in pager]
-        assert len(results) == 6
-        assert all(isinstance(i, dlp.JobTrigger)
-                   for i in results)
-
-def test_list_job_triggers_pages():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_job_triggers),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dlp.ListJobTriggersResponse(
-                job_triggers=[
-                    dlp.JobTrigger(),
-                    dlp.JobTrigger(),
-                    dlp.JobTrigger(),
-                ],
-                next_page_token='abc',
-            ),
-            dlp.ListJobTriggersResponse(
-                job_triggers=[],
-                next_page_token='def',
-            ),
-            dlp.ListJobTriggersResponse(
-                job_triggers=[
-                    dlp.JobTrigger(),
-                ],
-                next_page_token='ghi',
-            ),
-            dlp.ListJobTriggersResponse(
-                job_triggers=[
-                    dlp.JobTrigger(),
-                    dlp.JobTrigger(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_job_triggers(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_job_triggers_async_pager():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_job_triggers),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
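-        # (The trailing RuntimeError is a sentinel: iteration must stop at the
-        # page with an empty next_page_token rather than request a fifth page.)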
-        call.side_effect = (
-            dlp.ListJobTriggersResponse(
-                job_triggers=[
-                    dlp.JobTrigger(),
-                    dlp.JobTrigger(),
-                    dlp.JobTrigger(),
-                ],
-                next_page_token='abc',
-            ),
-            dlp.ListJobTriggersResponse(
-                job_triggers=[],
-                next_page_token='def',
-            ),
-            dlp.ListJobTriggersResponse(
-                job_triggers=[
-                    dlp.JobTrigger(),
-                ],
-                next_page_token='ghi',
-            ),
-            dlp.ListJobTriggersResponse(
-                job_triggers=[
-                    dlp.JobTrigger(),
-                    dlp.JobTrigger(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_job_triggers(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager:
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, dlp.JobTrigger)
-                   for i in responses)
-
-@pytest.mark.asyncio
-async def test_list_job_triggers_async_pages():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_job_triggers),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dlp.ListJobTriggersResponse(
-                job_triggers=[
-                    dlp.JobTrigger(),
-                    dlp.JobTrigger(),
-                    dlp.JobTrigger(),
-                ],
-                next_page_token='abc',
-            ),
-            dlp.ListJobTriggersResponse(
-                job_triggers=[],
-                next_page_token='def',
-            ),
-            dlp.ListJobTriggersResponse(
-                job_triggers=[
-                    dlp.JobTrigger(),
-                ],
-                next_page_token='ghi',
-            ),
-            dlp.ListJobTriggersResponse(
-                job_triggers=[
-                    dlp.JobTrigger(),
-                    dlp.JobTrigger(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        async for page_ in (await client.list_job_triggers(request={})).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-def test_delete_job_trigger(transport: str = 'grpc', request_type=dlp.DeleteJobTriggerRequest):
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_job_trigger),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_job_trigger(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.DeleteJobTriggerRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_job_trigger_from_dict():
-    test_delete_job_trigger(request_type=dict)
-
-
-def test_delete_job_trigger_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
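-    # (Patching __call__ on the transport's stub method intercepts the RPC
-    # at the transport layer, so no request ever goes over the wire.)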
- with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - client.delete_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() - - -@pytest.mark.asyncio -async def test_delete_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.DeleteJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_job_trigger_async_from_dict(): - await test_delete_job_trigger_async(request_type=dict) - - -def test_delete_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteJobTriggerRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - call.return_value = None - client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.DeleteJobTriggerRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_job_trigger_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
-    with mock.patch.object(
-            type(client.transport.delete_job_trigger),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_job_trigger(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-def test_delete_job_trigger_flattened_error():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_job_trigger(
-            dlp.DeleteJobTriggerRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_delete_job_trigger_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_job_trigger),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_job_trigger(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_delete_job_trigger_flattened_error_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_job_trigger(
-            dlp.DeleteJobTriggerRequest(),
-            name='name_value',
-        )
-
-
-def test_activate_job_trigger(transport: str = 'grpc', request_type=dlp.ActivateJobTriggerRequest):
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.activate_job_trigger),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.DlpJob(
-            name='name_value',
-            type_=dlp.DlpJobType.INSPECT_JOB,
-            state=dlp.DlpJob.JobState.PENDING,
-            job_trigger_name='job_trigger_name_value',
-            risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))),
-        )
-        response = client.activate_job_trigger(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.ActivateJobTriggerRequest()
-
-    # Establish that the response is the type that we expect.
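-    # (Message-typed fields such as risk_details are populated above but not
-    # asserted; only scalar fields are compared here.)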
- assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_activate_job_trigger_from_dict(): - test_activate_job_trigger(request_type=dict) - - -def test_activate_job_trigger_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - client.activate_job_trigger() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() - - -@pytest.mark.asyncio -async def test_activate_job_trigger_async(transport: str = 'grpc_asyncio', request_type=dlp.ActivateJobTriggerRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ActivateJobTriggerRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_activate_job_trigger_async_from_dict(): - await test_activate_job_trigger_async(request_type=dict) - - -def test_activate_job_trigger_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ActivateJobTriggerRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_activate_job_trigger_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ActivateJobTriggerRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.activate_job_trigger), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.activate_job_trigger(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_create_dlp_job(transport: str = 'grpc', request_type=dlp.CreateDlpJobRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - response = client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_create_dlp_job_from_dict(): - test_create_dlp_job(request_type=dict) - - -def test_create_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - client.create_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() - - -@pytest.mark.asyncio -async def test_create_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.CreateDlpJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_create_dlp_job_async_from_dict(): - await test_create_dlp_job_async(request_type=dict) - - -def test_create_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDlpJobRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - call.return_value = dlp.DlpJob() - client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_dlp_job_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.CreateDlpJobRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_dlp_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob()) - await client.create_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-def test_create_dlp_job_flattened():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.DlpJob()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_dlp_job(
-            parent='parent_value',
-            inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))),
-            risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-        assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value'))))
-
-
-def test_create_dlp_job_flattened_error():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_dlp_job(
-            dlp.CreateDlpJobRequest(),
-            parent='parent_value',
-            inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))),
-            risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))),
-        )
-
-
-@pytest.mark.asyncio
-async def test_create_dlp_job_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_dlp_job(
-            parent='parent_value',
-            inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))),
-            risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].risk_job == dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))) - - -@pytest.mark.asyncio -async def test_create_dlp_job_flattened_error_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_dlp_job( - dlp.CreateDlpJobRequest(), - parent='parent_value', - inspect_job=dlp.InspectJobConfig(storage_config=storage.StorageConfig(datastore_options=storage.DatastoreOptions(partition_id=storage.PartitionId(project_id='project_id_value')))), - risk_job=dlp.RiskAnalysisJobConfig(privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - - -def test_list_dlp_jobs(transport: str = 'grpc', request_type=dlp.ListDlpJobsRequest): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDlpJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_dlp_jobs_from_dict(): - test_list_dlp_jobs(request_type=dict) - - -def test_list_dlp_jobs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - client.list_dlp_jobs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async(transport: str = 'grpc_asyncio', request_type=dlp.ListDlpJobsRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. 
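-        # (FakeUnaryUnaryCall wraps the response so that awaiting the mocked
-        # stub behaves like a real unary-unary RPC.)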
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.ListDlpJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDlpJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_async_from_dict(): - await test_list_dlp_jobs_async(request_type=dict) - - -def test_list_dlp_jobs_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDlpJobsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - call.return_value = dlp.ListDlpJobsResponse() - client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_dlp_jobs_field_headers_async(): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.ListDlpJobsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse()) - await client.list_dlp_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_dlp_jobs_flattened(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_dlp_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = dlp.ListDlpJobsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_dlp_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_dlp_jobs_flattened_error(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
-    with pytest.raises(ValueError):
-        client.list_dlp_jobs(
-            dlp.ListDlpJobsRequest(),
-            parent='parent_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_list_dlp_jobs_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_dlp_jobs),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListDlpJobsResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_dlp_jobs(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-
-
-@pytest.mark.asyncio
-async def test_list_dlp_jobs_flattened_error_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_dlp_jobs(
-            dlp.ListDlpJobsRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_dlp_jobs_pager():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_dlp_jobs),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dlp.ListDlpJobsResponse(
-                jobs=[
-                    dlp.DlpJob(),
-                    dlp.DlpJob(),
-                    dlp.DlpJob(),
-                ],
-                next_page_token='abc',
-            ),
-            dlp.ListDlpJobsResponse(
-                jobs=[],
-                next_page_token='def',
-            ),
-            dlp.ListDlpJobsResponse(
-                jobs=[
-                    dlp.DlpJob(),
-                ],
-                next_page_token='ghi',
-            ),
-            dlp.ListDlpJobsResponse(
-                jobs=[
-                    dlp.DlpJob(),
-                    dlp.DlpJob(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        metadata = ()
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_dlp_jobs(request={})
-
-        assert pager._metadata == metadata
-
-        results = [i for i in pager]
-        assert len(results) == 6
-        assert all(isinstance(i, dlp.DlpJob)
-                   for i in results)
-
-def test_list_dlp_jobs_pages():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_dlp_jobs),
-            '__call__') as call:
-        # Set the response to a series of pages.
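-        # (Same pages as the pager test above; this variant walks the raw
-        # pages via the .pages property and checks each page token.)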
-        call.side_effect = (
-            dlp.ListDlpJobsResponse(
-                jobs=[
-                    dlp.DlpJob(),
-                    dlp.DlpJob(),
-                    dlp.DlpJob(),
-                ],
-                next_page_token='abc',
-            ),
-            dlp.ListDlpJobsResponse(
-                jobs=[],
-                next_page_token='def',
-            ),
-            dlp.ListDlpJobsResponse(
-                jobs=[
-                    dlp.DlpJob(),
-                ],
-                next_page_token='ghi',
-            ),
-            dlp.ListDlpJobsResponse(
-                jobs=[
-                    dlp.DlpJob(),
-                    dlp.DlpJob(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_dlp_jobs(request={}).pages)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_dlp_jobs_async_pager():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_dlp_jobs),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dlp.ListDlpJobsResponse(
-                jobs=[
-                    dlp.DlpJob(),
-                    dlp.DlpJob(),
-                    dlp.DlpJob(),
-                ],
-                next_page_token='abc',
-            ),
-            dlp.ListDlpJobsResponse(
-                jobs=[],
-                next_page_token='def',
-            ),
-            dlp.ListDlpJobsResponse(
-                jobs=[
-                    dlp.DlpJob(),
-                ],
-                next_page_token='ghi',
-            ),
-            dlp.ListDlpJobsResponse(
-                jobs=[
-                    dlp.DlpJob(),
-                    dlp.DlpJob(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_dlp_jobs(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager:
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, dlp.DlpJob)
-                   for i in responses)
-
-@pytest.mark.asyncio
-async def test_list_dlp_jobs_async_pages():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_dlp_jobs),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dlp.ListDlpJobsResponse(
-                jobs=[
-                    dlp.DlpJob(),
-                    dlp.DlpJob(),
-                    dlp.DlpJob(),
-                ],
-                next_page_token='abc',
-            ),
-            dlp.ListDlpJobsResponse(
-                jobs=[],
-                next_page_token='def',
-            ),
-            dlp.ListDlpJobsResponse(
-                jobs=[
-                    dlp.DlpJob(),
-                ],
-                next_page_token='ghi',
-            ),
-            dlp.ListDlpJobsResponse(
-                jobs=[
-                    dlp.DlpJob(),
-                    dlp.DlpJob(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        async for page_ in (await client.list_dlp_jobs(request={})).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc','def','ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-def test_get_dlp_job(transport: str = 'grpc', request_type=dlp.GetDlpJobRequest):
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
- call.return_value = dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - risk_details=dlp.AnalyzeDataSourceRiskDetails(requested_privacy_metric=dlp.PrivacyMetric(numerical_stats_config=dlp.PrivacyMetric.NumericalStatsConfig(field=storage.FieldId(name='name_value')))), - ) - response = client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -def test_get_dlp_job_from_dict(): - test_get_dlp_job(request_type=dict) - - -def test_get_dlp_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - client.get_dlp_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() - - -@pytest.mark.asyncio -async def test_get_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.GetDlpJobRequest): - client = DlpServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_dlp_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob( - name='name_value', - type_=dlp.DlpJobType.INSPECT_JOB, - state=dlp.DlpJob.JobState.PENDING, - job_trigger_name='job_trigger_name_value', - )) - response = await client.get_dlp_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == dlp.GetDlpJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, dlp.DlpJob) - assert response.name == 'name_value' - assert response.type_ == dlp.DlpJobType.INSPECT_JOB - assert response.state == dlp.DlpJob.JobState.PENDING - assert response.job_trigger_name == 'job_trigger_name_value' - - -@pytest.mark.asyncio -async def test_get_dlp_job_async_from_dict(): - await test_get_dlp_job_async(request_type=dict) - - -def test_get_dlp_job_field_headers(): - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = dlp.GetDlpJobRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
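-    # (Only request routing matters here, so an empty DlpJob is enough as
-    # the mocked response.)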
-    with mock.patch.object(
-            type(client.transport.get_dlp_job),
-            '__call__') as call:
-        call.return_value = dlp.DlpJob()
-        client.get_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_dlp_job_field_headers_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.GetDlpJobRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_dlp_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob())
-        await client.get_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_get_dlp_job_flattened():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.DlpJob()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_dlp_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-def test_get_dlp_job_flattened_error():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_dlp_job(
-            dlp.GetDlpJobRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_get_dlp_job_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.DlpJob())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_dlp_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_get_dlp_job_flattened_error_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_dlp_job(
-            dlp.GetDlpJobRequest(),
-            name='name_value',
-        )
-
-
-def test_delete_dlp_job(transport: str = 'grpc', request_type=dlp.DeleteDlpJobRequest):
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.DeleteDlpJobRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_dlp_job_from_dict():
-    test_delete_dlp_job(request_type=dict)
-
-
-def test_delete_dlp_job_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_dlp_job),
-            '__call__') as call:
-        client.delete_dlp_job()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.DeleteDlpJobRequest()
-
-
-@pytest.mark.asyncio
-async def test_delete_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteDlpJobRequest):
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.DeleteDlpJobRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_delete_dlp_job_async_from_dict():
-    await test_delete_dlp_job_async(request_type=dict)
-
-
-def test_delete_dlp_job_field_headers():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
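-    # (The x-goog-request-params entry asserted below is what server-side
-    # routing keys on; it should be derived from request.name.)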
-    request = dlp.DeleteDlpJobRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_dlp_job),
-            '__call__') as call:
-        call.return_value = None
-        client.delete_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_dlp_job_field_headers_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.DeleteDlpJobRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_dlp_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_delete_dlp_job_flattened():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_dlp_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-def test_delete_dlp_job_flattened_error():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_dlp_job(
-            dlp.DeleteDlpJobRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_delete_dlp_job_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_dlp_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_delete_dlp_job_flattened_error_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_dlp_job(
-            dlp.DeleteDlpJobRequest(),
-            name='name_value',
-        )
-
-
-def test_cancel_dlp_job(transport: str = 'grpc', request_type=dlp.CancelDlpJobRequest):
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.cancel_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.CancelDlpJobRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_cancel_dlp_job_from_dict():
-    test_cancel_dlp_job(request_type=dict)
-
-
-def test_cancel_dlp_job_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_dlp_job),
-            '__call__') as call:
-        client.cancel_dlp_job()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.CancelDlpJobRequest()
-
-
-@pytest.mark.asyncio
-async def test_cancel_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.CancelDlpJobRequest):
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.cancel_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.CancelDlpJobRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_cancel_dlp_job_async_from_dict():
-    await test_cancel_dlp_job_async(request_type=dict)
-
-
-def test_cancel_dlp_job_field_headers():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.CancelDlpJobRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_dlp_job),
-            '__call__') as call:
-        call.return_value = None
-        client.cancel_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_cancel_dlp_job_field_headers_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.CancelDlpJobRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.cancel_dlp_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.cancel_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_create_stored_info_type(transport: str = 'grpc', request_type=dlp.CreateStoredInfoTypeRequest):
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.StoredInfoType(
-            name='name_value',
-        )
-        response = client.create_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.CreateStoredInfoTypeRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dlp.StoredInfoType)
-    assert response.name == 'name_value'
-
-
-def test_create_stored_info_type_from_dict():
-    test_create_stored_info_type(request_type=dict)
-
-
-def test_create_stored_info_type_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
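-    # Patching '__call__' on the type of the transport's bound method
-    # swaps out the underlying unary-unary gRPC callable, so no real RPC
-    # is ever issued by these tests.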
-    with mock.patch.object(
-            type(client.transport.create_stored_info_type),
-            '__call__') as call:
-        client.create_stored_info_type()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.CreateStoredInfoTypeRequest()
-
-
-@pytest.mark.asyncio
-async def test_create_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.CreateStoredInfoTypeRequest):
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType(
-            name='name_value',
-        ))
-        response = await client.create_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.CreateStoredInfoTypeRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dlp.StoredInfoType)
-    assert response.name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_create_stored_info_type_async_from_dict():
-    await test_create_stored_info_type_async(request_type=dict)
-
-
-def test_create_stored_info_type_field_headers():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.CreateStoredInfoTypeRequest()
-
-    request.parent = 'parent/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_stored_info_type),
-            '__call__') as call:
-        call.return_value = dlp.StoredInfoType()
-        client.create_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_create_stored_info_type_field_headers_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.CreateStoredInfoTypeRequest()
-
-    request.parent = 'parent/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_stored_info_type),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
-        await client.create_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-def test_create_stored_info_type_flattened():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.StoredInfoType()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.create_stored_info_type(
-            parent='parent_value',
-            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-        assert args[0].config == dlp.StoredInfoTypeConfig(display_name='display_name_value')
-
-
-def test_create_stored_info_type_flattened_error():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.create_stored_info_type(
-            dlp.CreateStoredInfoTypeRequest(),
-            parent='parent_value',
-            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
-        )
-
-
-@pytest.mark.asyncio
-async def test_create_stored_info_type_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.create_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.create_stored_info_type(
-            parent='parent_value',
-            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-        assert args[0].config == dlp.StoredInfoTypeConfig(display_name='display_name_value')
-
-
-@pytest.mark.asyncio
-async def test_create_stored_info_type_flattened_error_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.create_stored_info_type(
-            dlp.CreateStoredInfoTypeRequest(),
-            parent='parent_value',
-            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
-        )
-
-
-def test_update_stored_info_type(transport: str = 'grpc', request_type=dlp.UpdateStoredInfoTypeRequest):
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.StoredInfoType(
-            name='name_value',
-        )
-        response = client.update_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.UpdateStoredInfoTypeRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dlp.StoredInfoType)
-    assert response.name == 'name_value'
-
-
-def test_update_stored_info_type_from_dict():
-    test_update_stored_info_type(request_type=dict)
-
-
-def test_update_stored_info_type_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_stored_info_type),
-            '__call__') as call:
-        client.update_stored_info_type()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.UpdateStoredInfoTypeRequest()
-
-
-@pytest.mark.asyncio
-async def test_update_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.UpdateStoredInfoTypeRequest):
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType(
-            name='name_value',
-        ))
-        response = await client.update_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.UpdateStoredInfoTypeRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dlp.StoredInfoType)
-    assert response.name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_update_stored_info_type_async_from_dict():
-    await test_update_stored_info_type_async(request_type=dict)
-
-
-def test_update_stored_info_type_field_headers():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.UpdateStoredInfoTypeRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_stored_info_type),
-            '__call__') as call:
-        call.return_value = dlp.StoredInfoType()
-        client.update_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
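-    # Each mock_calls entry unpacks as (name, args, kwargs); the gRPC
-    # metadata travels in the kwargs of the intercepted call.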
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_update_stored_info_type_field_headers_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.UpdateStoredInfoTypeRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_stored_info_type),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
-        await client.update_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_update_stored_info_type_flattened():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.StoredInfoType()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.update_stored_info_type(
-            name='name_value',
-            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-        assert args[0].config == dlp.StoredInfoTypeConfig(display_name='display_name_value')
-        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
-
-
-def test_update_stored_info_type_flattened_error():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.update_stored_info_type(
-            dlp.UpdateStoredInfoTypeRequest(),
-            name='name_value',
-            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-@pytest.mark.asyncio
-async def test_update_stored_info_type_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.update_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.update_stored_info_type(
-            name='name_value',
-            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-        assert args[0].config == dlp.StoredInfoTypeConfig(display_name='display_name_value')
-        assert args[0].update_mask == field_mask_pb2.FieldMask(paths=['paths_value'])
-
-
-@pytest.mark.asyncio
-async def test_update_stored_info_type_flattened_error_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.update_stored_info_type(
-            dlp.UpdateStoredInfoTypeRequest(),
-            name='name_value',
-            config=dlp.StoredInfoTypeConfig(display_name='display_name_value'),
-            update_mask=field_mask_pb2.FieldMask(paths=['paths_value']),
-        )
-
-
-def test_get_stored_info_type(transport: str = 'grpc', request_type=dlp.GetStoredInfoTypeRequest):
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.StoredInfoType(
-            name='name_value',
-        )
-        response = client.get_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.GetStoredInfoTypeRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dlp.StoredInfoType)
-    assert response.name == 'name_value'
-
-
-def test_get_stored_info_type_from_dict():
-    test_get_stored_info_type(request_type=dict)
-
-
-def test_get_stored_info_type_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_stored_info_type),
-            '__call__') as call:
-        client.get_stored_info_type()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.GetStoredInfoTypeRequest()
-
-
-@pytest.mark.asyncio
-async def test_get_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.GetStoredInfoTypeRequest):
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
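-        # FakeUnaryUnaryCall wraps the response in an awaitable, mimicking
-        # the call object a grpc.aio channel would return.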
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType(
-            name='name_value',
-        ))
-        response = await client.get_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.GetStoredInfoTypeRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dlp.StoredInfoType)
-    assert response.name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_get_stored_info_type_async_from_dict():
-    await test_get_stored_info_type_async(request_type=dict)
-
-
-def test_get_stored_info_type_field_headers():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.GetStoredInfoTypeRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_stored_info_type),
-            '__call__') as call:
-        call.return_value = dlp.StoredInfoType()
-        client.get_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_get_stored_info_type_field_headers_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.GetStoredInfoTypeRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_stored_info_type),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
-        await client.get_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_get_stored_info_type_flattened():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.StoredInfoType()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.get_stored_info_type(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-def test_get_stored_info_type_flattened_error():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.get_stored_info_type(
-            dlp.GetStoredInfoTypeRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_get_stored_info_type_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.get_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.StoredInfoType())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.get_stored_info_type(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_get_stored_info_type_flattened_error_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.get_stored_info_type(
-            dlp.GetStoredInfoTypeRequest(),
-            name='name_value',
-        )
-
-
-def test_list_stored_info_types(transport: str = 'grpc', request_type=dlp.ListStoredInfoTypesRequest):
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_stored_info_types),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.ListStoredInfoTypesResponse(
-            next_page_token='next_page_token_value',
-        )
-        response = client.list_stored_info_types(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.ListStoredInfoTypesRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListStoredInfoTypesPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-def test_list_stored_info_types_from_dict():
-    test_list_stored_info_types(request_type=dict)
-
-
-def test_list_stored_info_types_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_stored_info_types),
-            '__call__') as call:
-        client.list_stored_info_types()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.ListStoredInfoTypesRequest()
-
-
-@pytest.mark.asyncio
-async def test_list_stored_info_types_async(transport: str = 'grpc_asyncio', request_type=dlp.ListStoredInfoTypesRequest):
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_stored_info_types),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse(
-            next_page_token='next_page_token_value',
-        ))
-        response = await client.list_stored_info_types(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.ListStoredInfoTypesRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, pagers.ListStoredInfoTypesAsyncPager)
-    assert response.next_page_token == 'next_page_token_value'
-
-
-@pytest.mark.asyncio
-async def test_list_stored_info_types_async_from_dict():
-    await test_list_stored_info_types_async(request_type=dict)
-
-
-def test_list_stored_info_types_field_headers():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.ListStoredInfoTypesRequest()
-
-    request.parent = 'parent/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_stored_info_types),
-            '__call__') as call:
-        call.return_value = dlp.ListStoredInfoTypesResponse()
-        client.list_stored_info_types(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_list_stored_info_types_field_headers_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.ListStoredInfoTypesRequest()
-
-    request.parent = 'parent/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_stored_info_types),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse())
-        await client.list_stored_info_types(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'parent=parent/value',
-    ) in kw['metadata']
-
-
-def test_list_stored_info_types_flattened():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_stored_info_types),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.ListStoredInfoTypesResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.list_stored_info_types(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-
-
-def test_list_stored_info_types_flattened_error():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.list_stored_info_types(
-            dlp.ListStoredInfoTypesRequest(),
-            parent='parent_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_list_stored_info_types_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_stored_info_types),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.ListStoredInfoTypesResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.list_stored_info_types(
-            parent='parent_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].parent == 'parent_value'
-
-
-@pytest.mark.asyncio
-async def test_list_stored_info_types_flattened_error_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.list_stored_info_types(
-            dlp.ListStoredInfoTypesRequest(),
-            parent='parent_value',
-        )
-
-
-def test_list_stored_info_types_pager():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_stored_info_types),
-            '__call__') as call:
-        # Set the response to a series of pages.
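-        # Each element of side_effect is consumed by one successive call;
-        # the trailing RuntimeError is a sentinel that would surface if the
-        # pager ever read past the final (token-less) page.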
-        call.side_effect = (
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[
-                    dlp.StoredInfoType(),
-                    dlp.StoredInfoType(),
-                    dlp.StoredInfoType(),
-                ],
-                next_page_token='abc',
-            ),
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[],
-                next_page_token='def',
-            ),
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[
-                    dlp.StoredInfoType(),
-                ],
-                next_page_token='ghi',
-            ),
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[
-                    dlp.StoredInfoType(),
-                    dlp.StoredInfoType(),
-                ],
-            ),
-            RuntimeError,
-        )
-
-        metadata = ()
-        metadata = tuple(metadata) + (
-            gapic_v1.routing_header.to_grpc_metadata((
-                ('parent', ''),
-            )),
-        )
-        pager = client.list_stored_info_types(request={})
-
-        assert pager._metadata == metadata
-
-        results = [i for i in pager]
-        assert len(results) == 6
-        assert all(isinstance(i, dlp.StoredInfoType)
-                   for i in results)
-
-def test_list_stored_info_types_pages():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_stored_info_types),
-            '__call__') as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[
-                    dlp.StoredInfoType(),
-                    dlp.StoredInfoType(),
-                    dlp.StoredInfoType(),
-                ],
-                next_page_token='abc',
-            ),
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[],
-                next_page_token='def',
-            ),
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[
-                    dlp.StoredInfoType(),
-                ],
-                next_page_token='ghi',
-            ),
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[
-                    dlp.StoredInfoType(),
-                    dlp.StoredInfoType(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = list(client.list_stored_info_types(request={}).pages)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-@pytest.mark.asyncio
-async def test_list_stored_info_types_async_pager():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_stored_info_types),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[
-                    dlp.StoredInfoType(),
-                    dlp.StoredInfoType(),
-                    dlp.StoredInfoType(),
-                ],
-                next_page_token='abc',
-            ),
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[],
-                next_page_token='def',
-            ),
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[
-                    dlp.StoredInfoType(),
-                ],
-                next_page_token='ghi',
-            ),
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[
-                    dlp.StoredInfoType(),
-                    dlp.StoredInfoType(),
-                ],
-            ),
-            RuntimeError,
-        )
-        async_pager = await client.list_stored_info_types(request={},)
-        assert async_pager.next_page_token == 'abc'
-        responses = []
-        async for response in async_pager:
-            responses.append(response)
-
-        assert len(responses) == 6
-        assert all(isinstance(i, dlp.StoredInfoType)
-                   for i in responses)
-
-@pytest.mark.asyncio
-async def test_list_stored_info_types_async_pages():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.list_stored_info_types),
-            '__call__', new_callable=mock.AsyncMock) as call:
-        # Set the response to a series of pages.
-        call.side_effect = (
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[
-                    dlp.StoredInfoType(),
-                    dlp.StoredInfoType(),
-                    dlp.StoredInfoType(),
-                ],
-                next_page_token='abc',
-            ),
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[],
-                next_page_token='def',
-            ),
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[
-                    dlp.StoredInfoType(),
-                ],
-                next_page_token='ghi',
-            ),
-            dlp.ListStoredInfoTypesResponse(
-                stored_info_types=[
-                    dlp.StoredInfoType(),
-                    dlp.StoredInfoType(),
-                ],
-            ),
-            RuntimeError,
-        )
-        pages = []
-        async for page_ in (await client.list_stored_info_types(request={})).pages:
-            pages.append(page_)
-        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
-            assert page_.raw_page.next_page_token == token
-
-def test_delete_stored_info_type(transport: str = 'grpc', request_type=dlp.DeleteStoredInfoTypeRequest):
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.delete_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.DeleteStoredInfoTypeRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_delete_stored_info_type_from_dict():
-    test_delete_stored_info_type(request_type=dict)
-
-
-def test_delete_stored_info_type_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_stored_info_type),
-            '__call__') as call:
-        client.delete_stored_info_type()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.DeleteStoredInfoTypeRequest()
-
-
-@pytest.mark.asyncio
-async def test_delete_stored_info_type_async(transport: str = 'grpc_asyncio', request_type=dlp.DeleteStoredInfoTypeRequest):
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.delete_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.DeleteStoredInfoTypeRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_delete_stored_info_type_async_from_dict():
-    await test_delete_stored_info_type_async(request_type=dict)
-
-
-def test_delete_stored_info_type_field_headers():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.DeleteStoredInfoTypeRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_stored_info_type),
-            '__call__') as call:
-        call.return_value = None
-        client.delete_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_delete_stored_info_type_field_headers_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.DeleteStoredInfoTypeRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_stored_info_type),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.delete_stored_info_type(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_delete_stored_info_type_flattened():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.delete_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.delete_stored_info_type(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-def test_delete_stored_info_type_flattened_error():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.delete_stored_info_type(
-            dlp.DeleteStoredInfoTypeRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_delete_stored_info_type_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
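-    # As in the sync flattened test above, the keyword argument should be
-    # folded into the request message before it reaches the transport.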
-    with mock.patch.object(
-            type(client.transport.delete_stored_info_type),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.delete_stored_info_type(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_delete_stored_info_type_flattened_error_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.delete_stored_info_type(
-            dlp.DeleteStoredInfoTypeRequest(),
-            name='name_value',
-        )
-
-
-def test_hybrid_inspect_dlp_job(transport: str = 'grpc', request_type=dlp.HybridInspectDlpJobRequest):
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.hybrid_inspect_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.HybridInspectResponse()
-        response = client.hybrid_inspect_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.HybridInspectDlpJobRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dlp.HybridInspectResponse)
-
-
-def test_hybrid_inspect_dlp_job_from_dict():
-    test_hybrid_inspect_dlp_job(request_type=dict)
-
-
-def test_hybrid_inspect_dlp_job_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.hybrid_inspect_dlp_job),
-            '__call__') as call:
-        client.hybrid_inspect_dlp_job()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.HybridInspectDlpJobRequest()
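A note on the mocking idiom these deleted tests use throughout: the patch target is
`__call__` on the *type* of the transport's method wrapper, not on the wrapper
instance, because Python resolves the call protocol through the class. A minimal,
self-contained sketch of the idiom follows; the `Toy*` names are hypothetical
stand-ins, not part of this library.

from unittest import mock


class ToyMethod:
    """Stands in for a transport's callable RPC wrapper object."""
    def __call__(self, request):
        raise RuntimeError("would hit the network")


class ToyTransport:
    def __init__(self):
        self.delete_thing = ToyMethod()


class ToyClient:
    def __init__(self):
        self.transport = ToyTransport()

    def delete_thing(self, request):
        return self.transport.delete_thing(request)


def test_delete_thing_is_stubbed():
    client = ToyClient()
    # Patching __call__ on the wrapper's type leaves the wrapper in place
    # but routes every invocation to the mock instead of the network.
    with mock.patch.object(type(client.transport.delete_thing), '__call__') as call:
        call.return_value = None
        assert client.delete_thing(request={}) is None
        assert len(call.mock_calls) == 1

Because the client's public method still runs end to end, the tests can assert on
both the request object the stub received and the response the client returned.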
-@pytest.mark.asyncio
-async def test_hybrid_inspect_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.HybridInspectDlpJobRequest):
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.hybrid_inspect_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse())
-        response = await client.hybrid_inspect_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.HybridInspectDlpJobRequest()
-
-    # Establish that the response is the type that we expect.
-    assert isinstance(response, dlp.HybridInspectResponse)
-
-
-@pytest.mark.asyncio
-async def test_hybrid_inspect_dlp_job_async_from_dict():
-    await test_hybrid_inspect_dlp_job_async(request_type=dict)
-
-
-def test_hybrid_inspect_dlp_job_field_headers():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.HybridInspectDlpJobRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.hybrid_inspect_dlp_job),
-            '__call__') as call:
-        call.return_value = dlp.HybridInspectResponse()
-        client.hybrid_inspect_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_hybrid_inspect_dlp_job_field_headers_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.HybridInspectDlpJobRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.hybrid_inspect_dlp_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse())
-        await client.hybrid_inspect_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-def test_hybrid_inspect_dlp_job_flattened():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.hybrid_inspect_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = dlp.HybridInspectResponse()
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        client.hybrid_inspect_dlp_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
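The flattened-argument tests around this point pin down one client-side contract:
a caller may pass a full request object or individual keyword fields, never both.
A simplified sketch of that guard, with `FakeRequest` and `hybrid_inspect` as
hypothetical stand-ins for the generated message and client method:

class FakeRequest:
    """Hypothetical stand-in for a generated request message."""
    def __init__(self, name=''):
        self.name = name


def hybrid_inspect(request=None, *, name=None):
    # Mixing a request object with flattened fields is ambiguous, so the
    # generated clients refuse the combination up front.
    has_flattened_params = any([name])
    if request is not None and has_flattened_params:
        raise ValueError('If the `request` argument is set, then none of '
                         'the individual field arguments should be set.')
    if request is None:
        request = FakeRequest()
    if name is not None:
        request.name = name
    return request

Calling `hybrid_inspect(FakeRequest(), name='name_value')` raises ValueError,
which is exactly the behavior the `_flattened_error` tests assert.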
-def test_hybrid_inspect_dlp_job_flattened_error():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        client.hybrid_inspect_dlp_job(
-            dlp.HybridInspectDlpJobRequest(),
-            name='name_value',
-        )
-
-
-@pytest.mark.asyncio
-async def test_hybrid_inspect_dlp_job_flattened_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.hybrid_inspect_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dlp.HybridInspectResponse())
-        # Call the method with a truthy value for each flattened field,
-        # using the keyword arguments to the method.
-        response = await client.hybrid_inspect_dlp_job(
-            name='name_value',
-        )
-
-        # Establish that the underlying call was made with the expected
-        # request object values.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0].name == 'name_value'
-
-
-@pytest.mark.asyncio
-async def test_hybrid_inspect_dlp_job_flattened_error_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Attempting to call a method with both a request object and flattened
-    # fields is an error.
-    with pytest.raises(ValueError):
-        await client.hybrid_inspect_dlp_job(
-            dlp.HybridInspectDlpJobRequest(),
-            name='name_value',
-        )
-
-
-def test_finish_dlp_job(transport: str = 'grpc', request_type=dlp.FinishDlpJobRequest):
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.finish_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = None
-        response = client.finish_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.FinishDlpJobRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-def test_finish_dlp_job_from_dict():
-    test_finish_dlp_job(request_type=dict)
-
-
-def test_finish_dlp_job_empty_call():
-    # This test is a coverage failsafe to make sure that totally empty calls,
-    # i.e. request == None and no flattened fields passed, work.
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport='grpc',
-    )
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.finish_dlp_job),
-            '__call__') as call:
-        client.finish_dlp_job()
-        call.assert_called()
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.FinishDlpJobRequest()
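The async variants above and below set `call.return_value` to
`grpc_helpers_async.FakeUnaryUnaryCall(...)` (a test helper from
`google.api_core`) so that the mocked stub yields something awaitable. A rough,
hypothetical stand-in (`FakeCall` is not the real class) shows the shape:

class FakeCall:
    """Hypothetical sketch of an already-completed unary-unary call."""
    def __init__(self, response=None):
        self._response = response

    def __await__(self):
        # A generator-based __await__ with no pending work: awaiting the
        # call immediately produces the canned response.
        yield from ()
        return self._response

With `call.return_value = FakeCall(None)`, an `await client.finish_dlp_job(request)`
in a test completes synchronously and returns the canned value, which is why the
async tests can assert on the response without a running server.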
-@pytest.mark.asyncio
-async def test_finish_dlp_job_async(transport: str = 'grpc_asyncio', request_type=dlp.FinishDlpJobRequest):
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        transport=transport,
-    )
-
-    # Everything is optional in proto3 as far as the runtime is concerned,
-    # and we are mocking out the actual API, so just send an empty request.
-    request = request_type()
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.finish_dlp_job),
-            '__call__') as call:
-        # Designate an appropriate return value for the call.
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        response = await client.finish_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == dlp.FinishDlpJobRequest()
-
-    # Establish that the response is the type that we expect.
-    assert response is None
-
-
-@pytest.mark.asyncio
-async def test_finish_dlp_job_async_from_dict():
-    await test_finish_dlp_job_async(request_type=dict)
-
-
-def test_finish_dlp_job_field_headers():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.FinishDlpJobRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.finish_dlp_job),
-            '__call__') as call:
-        call.return_value = None
-        client.finish_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls) == 1
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
-
-
-@pytest.mark.asyncio
-async def test_finish_dlp_job_field_headers_async():
-    client = DlpServiceAsyncClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-
-    # Any value that is part of the HTTP/1.1 URI should be sent as
-    # a field header. Set these to a non-empty value.
-    request = dlp.FinishDlpJobRequest()
-
-    request.name = 'name/value'
-
-    # Mock the actual call within the gRPC stub, and fake the request.
-    with mock.patch.object(
-            type(client.transport.finish_dlp_job),
-            '__call__') as call:
-        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
-        await client.finish_dlp_job(request)
-
-        # Establish that the underlying gRPC stub method was called.
-        assert len(call.mock_calls)
-        _, args, _ = call.mock_calls[0]
-        assert args[0] == request
-
-    # Establish that the field header was sent.
-    _, _, kw = call.mock_calls[0]
-    assert (
-        'x-goog-request-params',
-        'name=name/value',
-    ) in kw['metadata']
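The field-header tests above verify that request fields which appear in the URL
are mirrored into `x-goog-request-params` metadata so the backend can route the
call. The generated clients build that header roughly like this (a simplified
sketch, not the library's exact code; `routing_metadata` is a hypothetical name):

from urllib.parse import quote


def routing_metadata(**fields):
    # Serialize the named request fields into the routing header; slashes
    # are kept literal, matching the 'name=name/value' the tests expect.
    params = '&'.join(
        '{}={}'.format(key, quote(str(value), safe='/'))
        for key, value in fields.items()
    )
    return (('x-goog-request-params', params),)


# routing_metadata(name='name/value')
# -> (('x-goog-request-params', 'name=name/value'),)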
-def test_credentials_transport_error():
-    # It is an error to provide credentials and a transport instance.
-    transport = transports.DlpServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = DlpServiceClient(
-            credentials=ga_credentials.AnonymousCredentials(),
-            transport=transport,
-        )
-
-    # It is an error to provide a credentials file and a transport instance.
-    transport = transports.DlpServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = DlpServiceClient(
-            client_options={"credentials_file": "credentials.json"},
-            transport=transport,
-        )
-
-    # It is an error to provide scopes and a transport instance.
-    transport = transports.DlpServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    with pytest.raises(ValueError):
-        client = DlpServiceClient(
-            client_options={"scopes": ["1", "2"]},
-            transport=transport,
-        )
-
-
-def test_transport_instance():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.DlpServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    client = DlpServiceClient(transport=transport)
-    assert client.transport is transport
-
-def test_transport_get_channel():
-    # A client may be instantiated with a custom transport instance.
-    transport = transports.DlpServiceGrpcTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-    transport = transports.DlpServiceGrpcAsyncIOTransport(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    channel = transport.grpc_channel
-    assert channel
-
-@pytest.mark.parametrize("transport_class", [
-    transports.DlpServiceGrpcTransport,
-    transports.DlpServiceGrpcAsyncIOTransport,
-])
-def test_transport_adc(transport_class):
-    # Test default credentials are used if not provided.
-    with mock.patch.object(google.auth, 'default') as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class()
-        adc.assert_called_once()
-
-def test_transport_grpc_default():
-    # A client should use the gRPC transport by default.
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-    )
-    assert isinstance(
-        client.transport,
-        transports.DlpServiceGrpcTransport,
-    )
-
-def test_dlp_service_base_transport_error():
-    # Passing both a credentials object and credentials_file should raise an error
-    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
-        transport = transports.DlpServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-            credentials_file="credentials.json",
-        )
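The base-transport test below relies on a simple contract: the abstract transport
exposes every RPC as an attribute whose invocation raises NotImplementedError
until a concrete transport (gRPC or gRPC-asyncio) overrides it. One way to
satisfy that contract, compressed into a hypothetical sketch (`ToyBaseTransport`
is not the real class, and only two RPC names are shown):

import pytest


class ToyBaseTransport:
    """Hypothetical sketch of the abstract transport contract."""

    def _unimplemented(self, *args, **kwargs):
        raise NotImplementedError()

    # Every RPC is exposed, but calling any of them raises until a
    # concrete transport overrides the attribute.
    inspect_content = _unimplemented
    redact_image = _unimplemented


def test_toy_base_transport_is_abstract():
    transport = ToyBaseTransport()
    for method in ('inspect_content', 'redact_image'):
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())

The real test does the same loop over all thirty-four DLP RPC names, as shown next.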
-def test_dlp_service_base_transport():
-    # Instantiate the base transport.
-    with mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport.__init__') as Transport:
-        Transport.return_value = None
-        transport = transports.DlpServiceTransport(
-            credentials=ga_credentials.AnonymousCredentials(),
-        )
-
-    # Every method on the transport should just blindly
-    # raise NotImplementedError.
-    methods = (
-        'inspect_content',
-        'redact_image',
-        'deidentify_content',
-        'reidentify_content',
-        'list_info_types',
-        'create_inspect_template',
-        'update_inspect_template',
-        'get_inspect_template',
-        'list_inspect_templates',
-        'delete_inspect_template',
-        'create_deidentify_template',
-        'update_deidentify_template',
-        'get_deidentify_template',
-        'list_deidentify_templates',
-        'delete_deidentify_template',
-        'create_job_trigger',
-        'update_job_trigger',
-        'hybrid_inspect_job_trigger',
-        'get_job_trigger',
-        'list_job_triggers',
-        'delete_job_trigger',
-        'activate_job_trigger',
-        'create_dlp_job',
-        'list_dlp_jobs',
-        'get_dlp_job',
-        'delete_dlp_job',
-        'cancel_dlp_job',
-        'create_stored_info_type',
-        'update_stored_info_type',
-        'get_stored_info_type',
-        'list_stored_info_types',
-        'delete_stored_info_type',
-        'hybrid_inspect_dlp_job',
-        'finish_dlp_job',
-    )
-    for method in methods:
-        with pytest.raises(NotImplementedError):
-            getattr(transport, method)(request=object())
-
-
-@requires_google_auth_gte_1_25_0
-def test_dlp_service_base_transport_with_credentials_file():
-    # Instantiate the base transport with a credentials file
-    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.DlpServiceTransport(
-            credentials_file="credentials.json",
-            quota_project_id="octopus",
-        )
-        load_creds.assert_called_once_with("credentials.json",
-            scopes=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            quota_project_id="octopus",
-        )
-
-
-@requires_google_auth_lt_1_25_0
-def test_dlp_service_base_transport_with_credentials_file_old_google_auth():
-    # Instantiate the base transport with a credentials file
-    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.DlpServiceTransport(
-            credentials_file="credentials.json",
-            quota_project_id="octopus",
-        )
-        load_creds.assert_called_once_with("credentials.json",
-            scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            quota_project_id="octopus",
-        )
-
-
-def test_dlp_service_base_transport_with_adc():
-    # Test the default credentials are used if credentials and credentials_file are None.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.dlp_v2.services.dlp_service.transports.DlpServiceTransport._prep_wrapped_messages') as Transport:
-        Transport.return_value = None
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport = transports.DlpServiceTransport()
-        adc.assert_called_once()
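The version-gated tests in this stretch pin down the credential resolution order:
explicit credentials win, then a credentials file, then Application Default
Credentials, with `default_scopes` passed only on google-auth >= 1.25.0 (hence
the gte/lt decorators). A hedged sketch of that resolution, simplified from what
the base transport actually does (`resolve_credentials` is a hypothetical name,
and the real code raises core_exceptions.DuplicateCredentialArgs, wraps scopes
handling differently, and applies quota projects in more places):

import google.auth

DEFAULT_SCOPES = ('https://www.googleapis.com/auth/cloud-platform',)


def resolve_credentials(credentials=None, credentials_file=None, scopes=None,
                        quota_project_id=None):
    if credentials and credentials_file:
        # Stand-in for core_exceptions.DuplicateCredentialArgs.
        raise ValueError('credentials and credentials_file are mutually exclusive')
    if credentials_file:
        # default_scopes requires google-auth >= 1.25.0, matching the gates above.
        credentials, _ = google.auth.load_credentials_from_file(
            credentials_file, scopes=scopes, default_scopes=DEFAULT_SCOPES,
            quota_project_id=quota_project_id)
    elif credentials is None:
        credentials, _ = google.auth.default(
            scopes=scopes, default_scopes=DEFAULT_SCOPES,
            quota_project_id=quota_project_id)
    return credentials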
-@requires_google_auth_gte_1_25_0
-def test_dlp_service_auth_adc():
-    # If no credentials are provided, we should use ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        DlpServiceClient()
-        adc.assert_called_once_with(
-            scopes=None,
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            quota_project_id=None,
-        )
-
-
-@requires_google_auth_lt_1_25_0
-def test_dlp_service_auth_adc_old_google_auth():
-    # If no credentials are provided, we should use ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        DlpServiceClient()
-        adc.assert_called_once_with(
-            scopes=('https://www.googleapis.com/auth/cloud-platform',),
-            quota_project_id=None,
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.DlpServiceGrpcTransport,
-        transports.DlpServiceGrpcAsyncIOTransport,
-    ],
-)
-@requires_google_auth_gte_1_25_0
-def test_dlp_service_transport_auth_adc(transport_class):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
-    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class(quota_project_id="octopus", scopes=["1", "2"])
-        adc.assert_called_once_with(
-            scopes=["1", "2"],
-            default_scopes=('https://www.googleapis.com/auth/cloud-platform',),
-            quota_project_id="octopus",
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class",
-    [
-        transports.DlpServiceGrpcTransport,
-        transports.DlpServiceGrpcAsyncIOTransport,
-    ],
-)
-@requires_google_auth_lt_1_25_0
-def test_dlp_service_transport_auth_adc_old_google_auth(transport_class):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
-    with mock.patch.object(google.auth, "default", autospec=True) as adc:
-        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
-        transport_class(quota_project_id="octopus")
-        adc.assert_called_once_with(
-            scopes=('https://www.googleapis.com/auth/cloud-platform',),
-            quota_project_id="octopus",
-        )
-
-
-@pytest.mark.parametrize(
-    "transport_class,grpc_helpers",
-    [
-        (transports.DlpServiceGrpcTransport, grpc_helpers),
-        (transports.DlpServiceGrpcAsyncIOTransport, grpc_helpers_async)
-    ],
-)
-def test_dlp_service_transport_create_channel(transport_class, grpc_helpers):
-    # If credentials and host are not provided, the transport class should use
-    # ADC credentials.
-    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
-        grpc_helpers, "create_channel", autospec=True
-    ) as create_channel:
-        creds = ga_credentials.AnonymousCredentials()
-        adc.return_value = (creds, None)
-        transport_class(
-            quota_project_id="octopus",
-            scopes=["1", "2"]
-        )
-
-        create_channel.assert_called_with(
-            "dlp.googleapis.com:443",
-            credentials=creds,
-            credentials_file=None,
-            quota_project_id="octopus",
-            default_scopes=(
-                'https://www.googleapis.com/auth/cloud-platform',
-            ),
-            scopes=["1", "2"],
-            default_host="dlp.googleapis.com",
-            ssl_credentials=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
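The mTLS test below, together with the `client_cert_source_callback` helper
defined earlier in this test module, exercises how a client certificate source
becomes gRPC channel credentials. A minimal sketch of that conversion; the
callback here is a hypothetical stand-in returning placeholder bytes rather
than real PEM data:

import grpc


def client_cert_source_callback():
    # Hypothetical: a real callback returns actual PEM-encoded bytes.
    return b"cert bytes", b"key bytes"


def ssl_credentials_from_cert_source(client_cert_source):
    # The transport invokes the source once and feeds the resulting pair
    # straight into grpc.ssl_channel_credentials, which is what the
    # assert_called_once_with below verifies.
    certificate_chain, private_key = client_cert_source()
    return grpc.ssl_channel_credentials(
        certificate_chain=certificate_chain,
        private_key=private_key,
    )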
-@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport])
-def test_dlp_service_grpc_transport_client_cert_source_for_mtls(
-    transport_class
-):
-    cred = ga_credentials.AnonymousCredentials()
-
-    # Check ssl_channel_credentials is used if provided.
-    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
-        mock_ssl_channel_creds = mock.Mock()
-        transport_class(
-            host="squid.clam.whelk",
-            credentials=cred,
-            ssl_channel_credentials=mock_ssl_channel_creds
-        )
-        mock_create_channel.assert_called_once_with(
-            "squid.clam.whelk:443",
-            credentials=cred,
-            credentials_file=None,
-            scopes=None,
-            ssl_credentials=mock_ssl_channel_creds,
-            quota_project_id=None,
-            options=[
-                ("grpc.max_send_message_length", -1),
-                ("grpc.max_receive_message_length", -1),
-            ],
-        )
-
-    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
-    # is used.
-    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
-        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
-            transport_class(
-                credentials=cred,
-                client_cert_source_for_mtls=client_cert_source_callback
-            )
-            expected_cert, expected_key = client_cert_source_callback()
-            mock_ssl_cred.assert_called_once_with(
-                certificate_chain=expected_cert,
-                private_key=expected_key
-            )
-
-
-def test_dlp_service_host_no_port():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com'),
-    )
-    assert client.transport._host == 'dlp.googleapis.com:443'
-
-
-def test_dlp_service_host_with_port():
-    client = DlpServiceClient(
-        credentials=ga_credentials.AnonymousCredentials(),
-        client_options=client_options.ClientOptions(api_endpoint='dlp.googleapis.com:8000'),
-    )
-    assert client.transport._host == 'dlp.googleapis.com:8000'
-
-def test_dlp_service_grpc_transport_channel():
-    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.DlpServiceGrpcTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-def test_dlp_service_grpc_asyncio_transport_channel():
-    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
-
-    # Check that channel is used if provided.
-    transport = transports.DlpServiceGrpcAsyncIOTransport(
-        host="squid.clam.whelk",
-        channel=channel,
-    )
-    assert transport.grpc_channel == channel
-    assert transport._host == "squid.clam.whelk:443"
-    assert transport._ssl_channel_credentials is None
-
-
-# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
-# removed from grpc/grpc_asyncio transport constructor.
-@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.DlpServiceGrpcTransport, transports.DlpServiceGrpcAsyncIOTransport]) -def test_dlp_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_deidentify_template_path(): - organization = "squid" - deidentify_template = "clam" - expected = "organizations/{organization}/deidentifyTemplates/{deidentify_template}".format(organization=organization, deidentify_template=deidentify_template, ) - actual = DlpServiceClient.deidentify_template_path(organization, deidentify_template) - assert expected == actual - - -def test_parse_deidentify_template_path(): - expected = { - "organization": "whelk", - "deidentify_template": "octopus", - } - path = DlpServiceClient.deidentify_template_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_deidentify_template_path(path) - assert expected == actual - -def test_dlp_content_path(): - project = "oyster" - expected = "projects/{project}/dlpContent".format(project=project, ) - actual = DlpServiceClient.dlp_content_path(project) - assert expected == actual - - -def test_parse_dlp_content_path(): - expected = { - "project": "nudibranch", - } - path = DlpServiceClient.dlp_content_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_dlp_content_path(path) - assert expected == actual - -def test_dlp_job_path(): - project = "cuttlefish" - dlp_job = "mussel" - expected = "projects/{project}/dlpJobs/{dlp_job}".format(project=project, dlp_job=dlp_job, ) - actual = DlpServiceClient.dlp_job_path(project, dlp_job) - assert expected == actual - - -def test_parse_dlp_job_path(): - expected = { - "project": "winkle", - "dlp_job": "nautilus", - } - path = DlpServiceClient.dlp_job_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_dlp_job_path(path) - assert expected == actual - -def test_finding_path(): - project = "scallop" - location = "abalone" - finding = "squid" - expected = "projects/{project}/locations/{location}/findings/{finding}".format(project=project, location=location, finding=finding, ) - actual = DlpServiceClient.finding_path(project, location, finding) - assert expected == actual - - -def test_parse_finding_path(): - expected = { - "project": "clam", - "location": "whelk", - "finding": "octopus", - } - path = DlpServiceClient.finding_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_finding_path(path) - assert expected == actual - -def test_inspect_template_path(): - organization = "oyster" - inspect_template = "nudibranch" - expected = "organizations/{organization}/inspectTemplates/{inspect_template}".format(organization=organization, inspect_template=inspect_template, ) - actual = DlpServiceClient.inspect_template_path(organization, inspect_template) - assert expected == actual - - -def test_parse_inspect_template_path(): - expected = { - "organization": "cuttlefish", - "inspect_template": "mussel", - } - path = DlpServiceClient.inspect_template_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_inspect_template_path(path) - assert expected == actual - -def test_job_trigger_path(): - project = "winkle" - job_trigger = "nautilus" - expected = "projects/{project}/jobTriggers/{job_trigger}".format(project=project, job_trigger=job_trigger, ) - actual = DlpServiceClient.job_trigger_path(project, job_trigger) - assert expected == actual - - -def test_parse_job_trigger_path(): - expected = { - "project": "scallop", - "job_trigger": "abalone", - } - path = DlpServiceClient.job_trigger_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_job_trigger_path(path) - assert expected == actual - -def test_stored_info_type_path(): - organization = "squid" - stored_info_type = "clam" - expected = "organizations/{organization}/storedInfoTypes/{stored_info_type}".format(organization=organization, stored_info_type=stored_info_type, ) - actual = DlpServiceClient.stored_info_type_path(organization, stored_info_type) - assert expected == actual - - -def test_parse_stored_info_type_path(): - expected = { - "organization": "whelk", - "stored_info_type": "octopus", - } - path = DlpServiceClient.stored_info_type_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_stored_info_type_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "oyster" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = DlpServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "nudibranch", - } - path = DlpServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "cuttlefish" - expected = "folders/{folder}".format(folder=folder, ) - actual = DlpServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "mussel", - } - path = DlpServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "winkle" - expected = "organizations/{organization}".format(organization=organization, ) - actual = DlpServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nautilus", - } - path = DlpServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "scallop" - expected = "projects/{project}".format(project=project, ) - actual = DlpServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "abalone", - } - path = DlpServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = DlpServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "squid" - location = "clam" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = DlpServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "whelk", - "location": "octopus", - } - path = DlpServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = DlpServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: - client = DlpServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.DlpServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = DlpServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/dlp_v2/test_dlp_service.py b/tests/unit/gapic/dlp_v2/test_dlp_service.py index 129ca631..cbcb8a45 100644 --- a/tests/unit/gapic/dlp_v2/test_dlp_service.py +++ b/tests/unit/gapic/dlp_v2/test_dlp_service.py @@ -129,7 +129,25 @@ def test_dlp_service_client_service_account_always_use_jwt(client_class): ) as use_jwt: creds = service_account.Credentials(None, None, None) client = client_class(credentials=creds) - use_jwt.assert_called_with(True) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DlpServiceGrpcTransport, "grpc"), + (transports.DlpServiceGrpcAsyncIOTransport, "grpc_asyncio"), + ], +) +def test_dlp_service_client_service_account_always_use_jwt_true( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) @pytest.mark.parametrize("client_class", [DlpServiceClient, DlpServiceAsyncClient,]) @@ -8268,7 +8286,7 @@ def test_dlp_service_grpc_transport_client_cert_source_for_mtls(transport_class) "squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, ssl_credentials=mock_ssl_channel_creds, quota_project_id=None, options=[ @@ -8370,7 +8388,7 @@ def test_dlp_service_transport_channel_mtls_with_client_cert_source(transport_cl "mtls.squid.clam.whelk:443", credentials=cred, credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[ @@ -8414,7 +8432,7 @@ def test_dlp_service_transport_channel_mtls_with_adc(transport_class): "mtls.squid.clam.whelk:443", credentials=mock_cred, credentials_file=None, - scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, ssl_credentials=mock_ssl_cred, quota_project_id=None, options=[