From 8b966c0e54bc89353a25be905f405e91e4578437 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 22 Jun 2021 00:30:18 +0000 Subject: [PATCH 1/2] chore: remove all monolith Bazel deps chore: release gapic-generator-csharp v1.3.7 chore: release gapic-generator-go 0.20.5 chore: release gapic-generator-java 1.0.14 chore: release gapic-generator-php 1.0.1 chore: release gapic-generator-python 0.50.0 chore: update gapic-generator-ruby to the latest commit chore: release gapic-generator-typescript 1.5.0 Committer: @miraleung PiperOrigin-RevId: 380641501 Source-Link: https://github.com/googleapis/googleapis/commit/076f7e9f0b258bdb54338895d7251b202e8f0de3 Source-Link: https://github.com/googleapis/googleapis-gen/commit/27e4c88b4048e5f56508d4e1aa417d60a3380892 --- owl-bot-staging/v1/.coveragerc | 17 + owl-bot-staging/v1/MANIFEST.in | 2 + owl-bot-staging/v1/README.rst | 49 + .../v1/docs/bigquery_logging_v1/services.rst | 4 + .../v1/docs/bigquery_logging_v1/types.rst | 7 + owl-bot-staging/v1/docs/conf.py | 376 +++++ owl-bot-staging/v1/docs/index.rst | 7 + .../google/cloud/bigquery_logging/__init__.py | 89 ++ .../v1/google/cloud/bigquery_logging/py.typed | 2 + .../cloud/bigquery_logging_v1/__init__.py | 90 ++ .../bigquery_logging_v1/gapic_metadata.json | 7 + .../google/cloud/bigquery_logging_v1/py.typed | 2 + .../bigquery_logging_v1/services/__init__.py | 15 + .../bigquery_logging_v1/types/__init__.py | 90 ++ .../bigquery_logging_v1/types/audit_data.py | 1393 +++++++++++++++++ owl-bot-staging/v1/mypy.ini | 3 + owl-bot-staging/v1/noxfile.py | 132 ++ .../fixup_bigquery_logging_v1_keywords.py | 175 +++ owl-bot-staging/v1/setup.py | 53 + owl-bot-staging/v1/tests/__init__.py | 16 + owl-bot-staging/v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../gapic/bigquery_logging_v1/__init__.py | 16 + 23 files changed, 2577 insertions(+) create mode 100644 owl-bot-staging/v1/.coveragerc create mode 100644 owl-bot-staging/v1/MANIFEST.in create mode 100644 owl-bot-staging/v1/README.rst create mode 100644 owl-bot-staging/v1/docs/bigquery_logging_v1/services.rst create mode 100644 owl-bot-staging/v1/docs/bigquery_logging_v1/types.rst create mode 100644 owl-bot-staging/v1/docs/conf.py create mode 100644 owl-bot-staging/v1/docs/index.rst create mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging_v1/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging_v1/gapic_metadata.json create mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging_v1/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging_v1/services/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/audit_data.py create mode 100644 owl-bot-staging/v1/mypy.ini create mode 100644 owl-bot-staging/v1/noxfile.py create mode 100644 owl-bot-staging/v1/scripts/fixup_bigquery_logging_v1_keywords.py create mode 100644 owl-bot-staging/v1/setup.py create mode 100644 owl-bot-staging/v1/tests/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/bigquery_logging_v1/__init__.py diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc new file mode 100644 
index 0000000..2ae3ab8 --- /dev/null +++ b/owl-bot-staging/v1/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/bigquery_logging/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in new file mode 100644 index 0000000..e3b1309 --- /dev/null +++ b/owl-bot-staging/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/bigquery_logging *.py +recursive-include google/cloud/bigquery_logging_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst new file mode 100644 index 0000000..223a1f6 --- /dev/null +++ b/owl-bot-staging/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Bigquery Logging API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Bigquery Logging API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/bigquery_logging_v1/services.rst b/owl-bot-staging/v1/docs/bigquery_logging_v1/services.rst new file mode 100644 index 0000000..b54c5c1 --- /dev/null +++ b/owl-bot-staging/v1/docs/bigquery_logging_v1/services.rst @@ -0,0 +1,4 @@ +Services for Google Cloud Bigquery Logging v1 API +================================================= +.. toctree:: + :maxdepth: 2 diff --git a/owl-bot-staging/v1/docs/bigquery_logging_v1/types.rst b/owl-bot-staging/v1/docs/bigquery_logging_v1/types.rst new file mode 100644 index 0000000..bba03ca --- /dev/null +++ b/owl-bot-staging/v1/docs/bigquery_logging_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Bigquery Logging v1 API +============================================== + +.. 
automodule:: google.cloud.bigquery_logging_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py new file mode 100644 index 0000000..9718c40 --- /dev/null +++ b/owl-bot-staging/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-bigquery-logging documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-bigquery-logging" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. 
+# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. 
+# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-bigquery-logging-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-bigquery-logging.tex", + u"google-cloud-bigquery-logging Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. 
+# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "google-cloud-bigquery-logging", + u"Google Cloud Bigquery Logging Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-bigquery-logging", + u"google-cloud-bigquery-logging Documentation", + author, + "google-cloud-bigquery-logging", + "GAPIC library for Google Cloud Bigquery Logging API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst new file mode 100644 index 0000000..e3f5c51 --- /dev/null +++ b/owl-bot-staging/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + bigquery_logging_v1/services + bigquery_logging_v1/types diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging/__init__.py b/owl-bot-staging/v1/google/cloud/bigquery_logging/__init__.py new file mode 100644 index 0000000..01793a6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/bigquery_logging/__init__.py @@ -0,0 +1,89 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + + +from google.cloud.bigquery_logging_v1.types.audit_data import AuditData +from google.cloud.bigquery_logging_v1.types.audit_data import BigQueryAcl +from google.cloud.bigquery_logging_v1.types.audit_data import Dataset +from google.cloud.bigquery_logging_v1.types.audit_data import DatasetInfo +from google.cloud.bigquery_logging_v1.types.audit_data import DatasetInsertRequest +from google.cloud.bigquery_logging_v1.types.audit_data import DatasetInsertResponse +from google.cloud.bigquery_logging_v1.types.audit_data import DatasetListRequest +from google.cloud.bigquery_logging_v1.types.audit_data import DatasetName +from google.cloud.bigquery_logging_v1.types.audit_data import DatasetUpdateRequest +from google.cloud.bigquery_logging_v1.types.audit_data import DatasetUpdateResponse +from google.cloud.bigquery_logging_v1.types.audit_data import EncryptionInfo +from google.cloud.bigquery_logging_v1.types.audit_data import Job +from google.cloud.bigquery_logging_v1.types.audit_data import JobCompletedEvent +from google.cloud.bigquery_logging_v1.types.audit_data import JobConfiguration +from google.cloud.bigquery_logging_v1.types.audit_data import JobGetQueryResultsRequest +from google.cloud.bigquery_logging_v1.types.audit_data import JobGetQueryResultsResponse +from google.cloud.bigquery_logging_v1.types.audit_data import JobInsertRequest +from google.cloud.bigquery_logging_v1.types.audit_data import JobInsertResponse +from google.cloud.bigquery_logging_v1.types.audit_data import JobName +from google.cloud.bigquery_logging_v1.types.audit_data import JobQueryDoneResponse +from google.cloud.bigquery_logging_v1.types.audit_data import JobQueryRequest +from google.cloud.bigquery_logging_v1.types.audit_data import JobQueryResponse +from google.cloud.bigquery_logging_v1.types.audit_data import JobStatistics +from google.cloud.bigquery_logging_v1.types.audit_data import JobStatus +from google.cloud.bigquery_logging_v1.types.audit_data import Table +from google.cloud.bigquery_logging_v1.types.audit_data import TableDataListRequest +from google.cloud.bigquery_logging_v1.types.audit_data import TableDataReadEvent +from google.cloud.bigquery_logging_v1.types.audit_data import TableDefinition +from google.cloud.bigquery_logging_v1.types.audit_data import TableInfo +from google.cloud.bigquery_logging_v1.types.audit_data import TableInsertRequest +from google.cloud.bigquery_logging_v1.types.audit_data import TableInsertResponse +from google.cloud.bigquery_logging_v1.types.audit_data import TableName +from google.cloud.bigquery_logging_v1.types.audit_data import TableUpdateRequest +from google.cloud.bigquery_logging_v1.types.audit_data import TableUpdateResponse +from google.cloud.bigquery_logging_v1.types.audit_data import TableViewDefinition + +__all__ = ('AuditData', + 'BigQueryAcl', + 'Dataset', + 'DatasetInfo', + 'DatasetInsertRequest', + 'DatasetInsertResponse', + 'DatasetListRequest', + 'DatasetName', + 'DatasetUpdateRequest', + 'DatasetUpdateResponse', + 'EncryptionInfo', + 'Job', + 'JobCompletedEvent', + 'JobConfiguration', + 'JobGetQueryResultsRequest', + 'JobGetQueryResultsResponse', + 'JobInsertRequest', + 'JobInsertResponse', + 'JobName', + 'JobQueryDoneResponse', + 'JobQueryRequest', + 'JobQueryResponse', + 'JobStatistics', + 'JobStatus', + 'Table', + 'TableDataListRequest', + 'TableDataReadEvent', + 'TableDefinition', + 'TableInfo', + 'TableInsertRequest', + 'TableInsertResponse', + 
'TableName', + 'TableUpdateRequest', + 'TableUpdateResponse', + 'TableViewDefinition', +) diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging/py.typed b/owl-bot-staging/v1/google/cloud/bigquery_logging/py.typed new file mode 100644 index 0000000..8cff817 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/bigquery_logging/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-logging package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/__init__.py b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/__init__.py new file mode 100644 index 0000000..2f721d4 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/__init__.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +from .types.audit_data import AuditData +from .types.audit_data import BigQueryAcl +from .types.audit_data import Dataset +from .types.audit_data import DatasetInfo +from .types.audit_data import DatasetInsertRequest +from .types.audit_data import DatasetInsertResponse +from .types.audit_data import DatasetListRequest +from .types.audit_data import DatasetName +from .types.audit_data import DatasetUpdateRequest +from .types.audit_data import DatasetUpdateResponse +from .types.audit_data import EncryptionInfo +from .types.audit_data import Job +from .types.audit_data import JobCompletedEvent +from .types.audit_data import JobConfiguration +from .types.audit_data import JobGetQueryResultsRequest +from .types.audit_data import JobGetQueryResultsResponse +from .types.audit_data import JobInsertRequest +from .types.audit_data import JobInsertResponse +from .types.audit_data import JobName +from .types.audit_data import JobQueryDoneResponse +from .types.audit_data import JobQueryRequest +from .types.audit_data import JobQueryResponse +from .types.audit_data import JobStatistics +from .types.audit_data import JobStatus +from .types.audit_data import Table +from .types.audit_data import TableDataListRequest +from .types.audit_data import TableDataReadEvent +from .types.audit_data import TableDefinition +from .types.audit_data import TableInfo +from .types.audit_data import TableInsertRequest +from .types.audit_data import TableInsertResponse +from .types.audit_data import TableName +from .types.audit_data import TableUpdateRequest +from .types.audit_data import TableUpdateResponse +from .types.audit_data import TableViewDefinition + +__all__ = ( +'AuditData', +'BigQueryAcl', +'Dataset', +'DatasetInfo', +'DatasetInsertRequest', +'DatasetInsertResponse', +'DatasetListRequest', +'DatasetName', +'DatasetUpdateRequest', +'DatasetUpdateResponse', +'EncryptionInfo', +'Job', +'JobCompletedEvent', +'JobConfiguration', +'JobGetQueryResultsRequest', +'JobGetQueryResultsResponse', +'JobInsertRequest', +'JobInsertResponse', +'JobName', +'JobQueryDoneResponse', +'JobQueryRequest', +'JobQueryResponse', +'JobStatistics', +'JobStatus', +'Table', +'TableDataListRequest', 
+'TableDataReadEvent', +'TableDefinition', +'TableInfo', +'TableInsertRequest', +'TableInsertResponse', +'TableName', +'TableUpdateRequest', +'TableUpdateResponse', +'TableViewDefinition', +) diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/gapic_metadata.json new file mode 100644 index 0000000..5c730dc --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/gapic_metadata.json @@ -0,0 +1,7 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.bigquery_logging_v1", + "protoPackage": "google.cloud.bigquery.logging.v1", + "schema": "1.0" +} diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/py.typed b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/py.typed new file mode 100644 index 0000000..8cff817 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-bigquery-logging package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/services/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/__init__.py new file mode 100644 index 0000000..19d6d96 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/__init__.py @@ -0,0 +1,90 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .audit_data import ( + AuditData, + BigQueryAcl, + Dataset, + DatasetInfo, + DatasetInsertRequest, + DatasetInsertResponse, + DatasetListRequest, + DatasetName, + DatasetUpdateRequest, + DatasetUpdateResponse, + EncryptionInfo, + Job, + JobCompletedEvent, + JobConfiguration, + JobGetQueryResultsRequest, + JobGetQueryResultsResponse, + JobInsertRequest, + JobInsertResponse, + JobName, + JobQueryDoneResponse, + JobQueryRequest, + JobQueryResponse, + JobStatistics, + JobStatus, + Table, + TableDataListRequest, + TableDataReadEvent, + TableDefinition, + TableInfo, + TableInsertRequest, + TableInsertResponse, + TableName, + TableUpdateRequest, + TableUpdateResponse, + TableViewDefinition, +) + +__all__ = ( + 'AuditData', + 'BigQueryAcl', + 'Dataset', + 'DatasetInfo', + 'DatasetInsertRequest', + 'DatasetInsertResponse', + 'DatasetListRequest', + 'DatasetName', + 'DatasetUpdateRequest', + 'DatasetUpdateResponse', + 'EncryptionInfo', + 'Job', + 'JobCompletedEvent', + 'JobConfiguration', + 'JobGetQueryResultsRequest', + 'JobGetQueryResultsResponse', + 'JobInsertRequest', + 'JobInsertResponse', + 'JobName', + 'JobQueryDoneResponse', + 'JobQueryRequest', + 'JobQueryResponse', + 'JobStatistics', + 'JobStatus', + 'Table', + 'TableDataListRequest', + 'TableDataReadEvent', + 'TableDefinition', + 'TableInfo', + 'TableInsertRequest', + 'TableInsertResponse', + 'TableName', + 'TableUpdateRequest', + 'TableUpdateResponse', + 'TableViewDefinition', +) diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/audit_data.py b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/audit_data.py new file mode 100644 index 0000000..6c14bdb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/audit_data.py @@ -0,0 +1,1393 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import proto # type: ignore + +from google.iam.v1 import iam_policy_pb2 # type: ignore +from google.iam.v1 import policy_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.bigquery.logging.v1', + manifest={ + 'AuditData', + 'TableInsertRequest', + 'TableUpdateRequest', + 'TableInsertResponse', + 'TableUpdateResponse', + 'DatasetListRequest', + 'DatasetInsertRequest', + 'DatasetInsertResponse', + 'DatasetUpdateRequest', + 'DatasetUpdateResponse', + 'JobInsertRequest', + 'JobInsertResponse', + 'JobQueryRequest', + 'JobQueryResponse', + 'JobGetQueryResultsRequest', + 'JobGetQueryResultsResponse', + 'JobQueryDoneResponse', + 'JobCompletedEvent', + 'TableDataReadEvent', + 'TableDataListRequest', + 'Table', + 'TableInfo', + 'TableViewDefinition', + 'Dataset', + 'DatasetInfo', + 'BigQueryAcl', + 'Job', + 'JobConfiguration', + 'TableDefinition', + 'JobStatus', + 'JobStatistics', + 'DatasetName', + 'TableName', + 'JobName', + 'EncryptionInfo', + }, +) + + +class AuditData(proto.Message): + r"""BigQuery request and response messages for audit log. Note: + ``Table.schema`` has been deprecated in favor of + ``Table.schemaJson``. ``Table.schema`` may continue to be present in + your logs during this transition. + + Attributes: + table_insert_request (google.cloud.bigquery_logging_v1.types.TableInsertRequest): + Table insert request. + table_update_request (google.cloud.bigquery_logging_v1.types.TableUpdateRequest): + Table update request. + dataset_list_request (google.cloud.bigquery_logging_v1.types.DatasetListRequest): + Dataset list request. + dataset_insert_request (google.cloud.bigquery_logging_v1.types.DatasetInsertRequest): + Dataset insert request. + dataset_update_request (google.cloud.bigquery_logging_v1.types.DatasetUpdateRequest): + Dataset update request. + job_insert_request (google.cloud.bigquery_logging_v1.types.JobInsertRequest): + Job insert request. + job_query_request (google.cloud.bigquery_logging_v1.types.JobQueryRequest): + Job query request. + job_get_query_results_request (google.cloud.bigquery_logging_v1.types.JobGetQueryResultsRequest): + Job get query results request. + table_data_list_request (google.cloud.bigquery_logging_v1.types.TableDataListRequest): + Table data-list request. + set_iam_policy_request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): + Iam policy request. + table_insert_response (google.cloud.bigquery_logging_v1.types.TableInsertResponse): + Table insert response. + table_update_response (google.cloud.bigquery_logging_v1.types.TableUpdateResponse): + Table update response. + dataset_insert_response (google.cloud.bigquery_logging_v1.types.DatasetInsertResponse): + Dataset insert response. + dataset_update_response (google.cloud.bigquery_logging_v1.types.DatasetUpdateResponse): + Dataset update response. + job_insert_response (google.cloud.bigquery_logging_v1.types.JobInsertResponse): + Job insert response. + job_query_response (google.cloud.bigquery_logging_v1.types.JobQueryResponse): + Job query response. + job_get_query_results_response (google.cloud.bigquery_logging_v1.types.JobGetQueryResultsResponse): + Job get query results response. + job_query_done_response (google.cloud.bigquery_logging_v1.types.JobQueryDoneResponse): + Deprecated: Job query-done response. Use this + information for usage analysis. 
+ policy_response (google.iam.v1.policy_pb2.Policy): + Iam Policy. + job_completed_event (google.cloud.bigquery_logging_v1.types.JobCompletedEvent): + A job completion event. + table_data_read_events (Sequence[google.cloud.bigquery_logging_v1.types.TableDataReadEvent]): + Information about the table access events. + """ + + table_insert_request = proto.Field( + proto.MESSAGE, + number=1, + oneof='request', + message='TableInsertRequest', + ) + table_update_request = proto.Field( + proto.MESSAGE, + number=16, + oneof='request', + message='TableUpdateRequest', + ) + dataset_list_request = proto.Field( + proto.MESSAGE, + number=2, + oneof='request', + message='DatasetListRequest', + ) + dataset_insert_request = proto.Field( + proto.MESSAGE, + number=3, + oneof='request', + message='DatasetInsertRequest', + ) + dataset_update_request = proto.Field( + proto.MESSAGE, + number=4, + oneof='request', + message='DatasetUpdateRequest', + ) + job_insert_request = proto.Field( + proto.MESSAGE, + number=5, + oneof='request', + message='JobInsertRequest', + ) + job_query_request = proto.Field( + proto.MESSAGE, + number=6, + oneof='request', + message='JobQueryRequest', + ) + job_get_query_results_request = proto.Field( + proto.MESSAGE, + number=7, + oneof='request', + message='JobGetQueryResultsRequest', + ) + table_data_list_request = proto.Field( + proto.MESSAGE, + number=8, + oneof='request', + message='TableDataListRequest', + ) + set_iam_policy_request = proto.Field( + proto.MESSAGE, + number=20, + oneof='request', + message=iam_policy_pb2.SetIamPolicyRequest, + ) + table_insert_response = proto.Field( + proto.MESSAGE, + number=9, + oneof='response', + message='TableInsertResponse', + ) + table_update_response = proto.Field( + proto.MESSAGE, + number=10, + oneof='response', + message='TableUpdateResponse', + ) + dataset_insert_response = proto.Field( + proto.MESSAGE, + number=11, + oneof='response', + message='DatasetInsertResponse', + ) + dataset_update_response = proto.Field( + proto.MESSAGE, + number=12, + oneof='response', + message='DatasetUpdateResponse', + ) + job_insert_response = proto.Field( + proto.MESSAGE, + number=18, + oneof='response', + message='JobInsertResponse', + ) + job_query_response = proto.Field( + proto.MESSAGE, + number=13, + oneof='response', + message='JobQueryResponse', + ) + job_get_query_results_response = proto.Field( + proto.MESSAGE, + number=14, + oneof='response', + message='JobGetQueryResultsResponse', + ) + job_query_done_response = proto.Field( + proto.MESSAGE, + number=15, + oneof='response', + message='JobQueryDoneResponse', + ) + policy_response = proto.Field( + proto.MESSAGE, + number=21, + oneof='response', + message=policy_pb2.Policy, + ) + job_completed_event = proto.Field( + proto.MESSAGE, + number=17, + message='JobCompletedEvent', + ) + table_data_read_events = proto.RepeatedField( + proto.MESSAGE, + number=19, + message='TableDataReadEvent', + ) + + +class TableInsertRequest(proto.Message): + r"""Table insert request. + Attributes: + resource (google.cloud.bigquery_logging_v1.types.Table): + The new table. + """ + + resource = proto.Field( + proto.MESSAGE, + number=1, + message='Table', + ) + + +class TableUpdateRequest(proto.Message): + r"""Table update request. + Attributes: + resource (google.cloud.bigquery_logging_v1.types.Table): + The table to be updated. + """ + + resource = proto.Field( + proto.MESSAGE, + number=1, + message='Table', + ) + + +class TableInsertResponse(proto.Message): + r"""Table insert response. 
+ Attributes: + resource (google.cloud.bigquery_logging_v1.types.Table): + Final state of the inserted table. + """ + + resource = proto.Field( + proto.MESSAGE, + number=1, + message='Table', + ) + + +class TableUpdateResponse(proto.Message): + r"""Table update response. + Attributes: + resource (google.cloud.bigquery_logging_v1.types.Table): + Final state of the updated table. + """ + + resource = proto.Field( + proto.MESSAGE, + number=1, + message='Table', + ) + + +class DatasetListRequest(proto.Message): + r"""Dataset list request. + Attributes: + list_all (bool): + Whether to list all datasets, including + hidden ones. + """ + + list_all = proto.Field( + proto.BOOL, + number=1, + ) + + +class DatasetInsertRequest(proto.Message): + r"""Dataset insert request. + Attributes: + resource (google.cloud.bigquery_logging_v1.types.Dataset): + The dataset to be inserted. + """ + + resource = proto.Field( + proto.MESSAGE, + number=1, + message='Dataset', + ) + + +class DatasetInsertResponse(proto.Message): + r"""Dataset insert response. + Attributes: + resource (google.cloud.bigquery_logging_v1.types.Dataset): + Final state of the inserted dataset. + """ + + resource = proto.Field( + proto.MESSAGE, + number=1, + message='Dataset', + ) + + +class DatasetUpdateRequest(proto.Message): + r"""Dataset update request. + Attributes: + resource (google.cloud.bigquery_logging_v1.types.Dataset): + The dataset to be updated. + """ + + resource = proto.Field( + proto.MESSAGE, + number=1, + message='Dataset', + ) + + +class DatasetUpdateResponse(proto.Message): + r"""Dataset update response. + Attributes: + resource (google.cloud.bigquery_logging_v1.types.Dataset): + Final state of the updated dataset. + """ + + resource = proto.Field( + proto.MESSAGE, + number=1, + message='Dataset', + ) + + +class JobInsertRequest(proto.Message): + r"""Job insert request. + Attributes: + resource (google.cloud.bigquery_logging_v1.types.Job): + Job insert request. + """ + + resource = proto.Field( + proto.MESSAGE, + number=1, + message='Job', + ) + + +class JobInsertResponse(proto.Message): + r"""Job insert response. + Attributes: + resource (google.cloud.bigquery_logging_v1.types.Job): + Job insert response. + """ + + resource = proto.Field( + proto.MESSAGE, + number=1, + message='Job', + ) + + +class JobQueryRequest(proto.Message): + r"""Job query request. + Attributes: + query (str): + The query. + max_results (int): + The maximum number of results. + default_dataset (google.cloud.bigquery_logging_v1.types.DatasetName): + The default dataset for tables that do not + have a dataset specified. + project_id (str): + Project that the query should be charged to. + dry_run (bool): + If true, don't actually run the job. Just + check that it would run. + """ + + query = proto.Field( + proto.STRING, + number=1, + ) + max_results = proto.Field( + proto.UINT32, + number=2, + ) + default_dataset = proto.Field( + proto.MESSAGE, + number=3, + message='DatasetName', + ) + project_id = proto.Field( + proto.STRING, + number=4, + ) + dry_run = proto.Field( + proto.BOOL, + number=5, + ) + + +class JobQueryResponse(proto.Message): + r"""Job query response. + Attributes: + total_results (int): + The total number of rows in the full query + result set. + job (google.cloud.bigquery_logging_v1.types.Job): + Information about the queried job. 
+ """ + + total_results = proto.Field( + proto.UINT64, + number=1, + ) + job = proto.Field( + proto.MESSAGE, + number=2, + message='Job', + ) + + +class JobGetQueryResultsRequest(proto.Message): + r"""Job getQueryResults request. + Attributes: + max_results (int): + Maximum number of results to return. + start_row (int): + Zero-based row number at which to start. + """ + + max_results = proto.Field( + proto.UINT32, + number=1, + ) + start_row = proto.Field( + proto.UINT64, + number=2, + ) + + +class JobGetQueryResultsResponse(proto.Message): + r"""Job getQueryResults response. + Attributes: + total_results (int): + Total number of results in query results. + job (google.cloud.bigquery_logging_v1.types.Job): + The job that was created to run the query. It completed if + ``job.status.state`` is ``DONE``. It failed if + ``job.status.errorResult`` is also present. + """ + + total_results = proto.Field( + proto.UINT64, + number=1, + ) + job = proto.Field( + proto.MESSAGE, + number=2, + message='Job', + ) + + +class JobQueryDoneResponse(proto.Message): + r"""Job getQueryDone response. + Attributes: + job (google.cloud.bigquery_logging_v1.types.Job): + The job and status information. The job completed if + ``job.status.state`` is ``DONE``. + """ + + job = proto.Field( + proto.MESSAGE, + number=1, + message='Job', + ) + + +class JobCompletedEvent(proto.Message): + r"""Query job completed event. + Attributes: + event_name (str): + Name of the event. + job (google.cloud.bigquery_logging_v1.types.Job): + Job information. + """ + + event_name = proto.Field( + proto.STRING, + number=1, + ) + job = proto.Field( + proto.MESSAGE, + number=2, + message='Job', + ) + + +class TableDataReadEvent(proto.Message): + r"""Table data read event. Only present for tables, not views, + and is only included in the log record for the project that owns + the table. + + Attributes: + table_name (google.cloud.bigquery_logging_v1.types.TableName): + Name of the accessed table. + referenced_fields (Sequence[str]): + A list of referenced fields. This information + is not included by default. To enable this in + the logs, please contact BigQuery support or + open a bug in the BigQuery issue tracker. + """ + + table_name = proto.Field( + proto.MESSAGE, + number=1, + message='TableName', + ) + referenced_fields = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class TableDataListRequest(proto.Message): + r"""Table data-list request. + Attributes: + start_row (int): + Starting row offset. + max_results (int): + Maximum number of results to return. + """ + + start_row = proto.Field( + proto.UINT64, + number=1, + ) + max_results = proto.Field( + proto.UINT32, + number=2, + ) + + +class Table(proto.Message): + r"""Describes a BigQuery table. See the + `Table `__ API resource for more + details on individual fields. Note: ``Table.schema`` has been + deprecated in favor of ``Table.schemaJson``. ``Table.schema`` may + continue to be present in your logs during this transition. + + Attributes: + table_name (google.cloud.bigquery_logging_v1.types.TableName): + The name of the table. + info (google.cloud.bigquery_logging_v1.types.TableInfo): + User-provided metadata for the table. + schema_json (str): + A JSON representation of the table's schema. + view (google.cloud.bigquery_logging_v1.types.TableViewDefinition): + If present, this is a virtual table defined + by a SQL query. 
+ expire_time (google.protobuf.timestamp_pb2.Timestamp): + The expiration date for the table, after + which the table is deleted and the storage + reclaimed. If not present, the table persists + indefinitely. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the table was created. + truncate_time (google.protobuf.timestamp_pb2.Timestamp): + The time the table was last truncated by an operation with a + ``writeDisposition`` of ``WRITE_TRUNCATE``. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The time the table was last modified. + encryption (google.cloud.bigquery_logging_v1.types.EncryptionInfo): + The table encryption information. Set when + non-default encryption is used. + """ + + table_name = proto.Field( + proto.MESSAGE, + number=1, + message='TableName', + ) + info = proto.Field( + proto.MESSAGE, + number=2, + message='TableInfo', + ) + schema_json = proto.Field( + proto.STRING, + number=8, + ) + view = proto.Field( + proto.MESSAGE, + number=4, + message='TableViewDefinition', + ) + expire_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + create_time = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + truncate_time = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=9, + message=timestamp_pb2.Timestamp, + ) + encryption = proto.Field( + proto.MESSAGE, + number=10, + message='EncryptionInfo', + ) + + +class TableInfo(proto.Message): + r"""User-provided metadata for a table. + Attributes: + friendly_name (str): + A short name for the table, such + as\ ``"Analytics Data - Jan 2011"``. + description (str): + A long description, perhaps several + paragraphs, describing the table contents in + detail. + labels (Sequence[google.cloud.bigquery_logging_v1.types.TableInfo.LabelsEntry]): + Labels provided for the table. + """ + + friendly_name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=2, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + + +class TableViewDefinition(proto.Message): + r"""Describes a virtual table defined by a SQL query. + Attributes: + query (str): + SQL query defining the view. + """ + + query = proto.Field( + proto.STRING, + number=1, + ) + + +class Dataset(proto.Message): + r"""BigQuery dataset information. See the + `Dataset `__ API resource for + more details on individual fields. + + Attributes: + dataset_name (google.cloud.bigquery_logging_v1.types.DatasetName): + The name of the dataset. + info (google.cloud.bigquery_logging_v1.types.DatasetInfo): + User-provided metadata for the dataset. + create_time (google.protobuf.timestamp_pb2.Timestamp): + The time the dataset was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + The time the dataset was last modified. + acl (google.cloud.bigquery_logging_v1.types.BigQueryAcl): + The access control list for the dataset. + default_table_expire_duration (google.protobuf.duration_pb2.Duration): + If this field is present, each table that does not specify + an expiration time is assigned an expiration time by adding + this duration to the table's ``createTime``. If this field + is empty, there is no default table expiration time. 
+ """ + + dataset_name = proto.Field( + proto.MESSAGE, + number=1, + message='DatasetName', + ) + info = proto.Field( + proto.MESSAGE, + number=2, + message='DatasetInfo', + ) + create_time = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + acl = proto.Field( + proto.MESSAGE, + number=6, + message='BigQueryAcl', + ) + default_table_expire_duration = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + + +class DatasetInfo(proto.Message): + r"""User-provided metadata for a dataset. + Attributes: + friendly_name (str): + A short name for the dataset, such + as\ ``"Analytics Data 2011"``. + description (str): + A long description, perhaps several + paragraphs, describing the dataset contents in + detail. + labels (Sequence[google.cloud.bigquery_logging_v1.types.DatasetInfo.LabelsEntry]): + Labels provided for the dataset. + """ + + friendly_name = proto.Field( + proto.STRING, + number=1, + ) + description = proto.Field( + proto.STRING, + number=2, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + + +class BigQueryAcl(proto.Message): + r"""An access control list. + Attributes: + entries (Sequence[google.cloud.bigquery_logging_v1.types.BigQueryAcl.Entry]): + Access control entry list. + """ + + class Entry(proto.Message): + r"""Access control entry. + Attributes: + role (str): + The granted role, which can be ``READER``, ``WRITER``, or + ``OWNER``. + group_email (str): + Grants access to a group identified by an + email address. + user_email (str): + Grants access to a user identified by an + email address. + domain (str): + Grants access to all members of a domain. + special_group (str): + Grants access to special groups. Valid groups are + ``PROJECT_OWNERS``, ``PROJECT_READERS``, ``PROJECT_WRITERS`` + and ``ALL_AUTHENTICATED_USERS``. + view_name (google.cloud.bigquery_logging_v1.types.TableName): + Grants access to a BigQuery View. + """ + + role = proto.Field( + proto.STRING, + number=1, + ) + group_email = proto.Field( + proto.STRING, + number=2, + ) + user_email = proto.Field( + proto.STRING, + number=3, + ) + domain = proto.Field( + proto.STRING, + number=4, + ) + special_group = proto.Field( + proto.STRING, + number=5, + ) + view_name = proto.Field( + proto.MESSAGE, + number=6, + message='TableName', + ) + + entries = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=Entry, + ) + + +class Job(proto.Message): + r"""Describes a job. + Attributes: + job_name (google.cloud.bigquery_logging_v1.types.JobName): + Job name. + job_configuration (google.cloud.bigquery_logging_v1.types.JobConfiguration): + Job configuration. + job_status (google.cloud.bigquery_logging_v1.types.JobStatus): + Job status. + job_statistics (google.cloud.bigquery_logging_v1.types.JobStatistics): + Job statistics. + """ + + job_name = proto.Field( + proto.MESSAGE, + number=1, + message='JobName', + ) + job_configuration = proto.Field( + proto.MESSAGE, + number=2, + message='JobConfiguration', + ) + job_status = proto.Field( + proto.MESSAGE, + number=3, + message='JobStatus', + ) + job_statistics = proto.Field( + proto.MESSAGE, + number=4, + message='JobStatistics', + ) + + +class JobConfiguration(proto.Message): + r"""Job configuration information. See the + `Jobs `__ API resource for more + details on individual fields. 
+ + Attributes: + query (google.cloud.bigquery_logging_v1.types.JobConfiguration.Query): + Query job information. + load (google.cloud.bigquery_logging_v1.types.JobConfiguration.Load): + Load job information. + extract (google.cloud.bigquery_logging_v1.types.JobConfiguration.Extract): + Extract job information. + table_copy (google.cloud.bigquery_logging_v1.types.JobConfiguration.TableCopy): + TableCopy job information. + dry_run (bool): + If true, don't actually run the job. Just + check that it would run. + labels (Sequence[google.cloud.bigquery_logging_v1.types.JobConfiguration.LabelsEntry]): + Labels provided for the job. + """ + + class Query(proto.Message): + r"""Describes a query job, which executes a SQL-like query. + Attributes: + query (str): + The SQL query to run. + destination_table (google.cloud.bigquery_logging_v1.types.TableName): + The table where results are written. + create_disposition (str): + Describes when a job is allowed to create a table: + ``CREATE_IF_NEEDED``, ``CREATE_NEVER``. + write_disposition (str): + Describes how writes affect existing tables: + ``WRITE_TRUNCATE``, ``WRITE_APPEND``, ``WRITE_EMPTY``. + default_dataset (google.cloud.bigquery_logging_v1.types.DatasetName): + If a table name is specified without a + dataset in a query, this dataset will be added + to table name. + table_definitions (Sequence[google.cloud.bigquery_logging_v1.types.TableDefinition]): + Describes data sources outside BigQuery, if + needed. + query_priority (str): + Describes the priority given to the query: + ``QUERY_INTERACTIVE`` or ``QUERY_BATCH``. + destination_table_encryption (google.cloud.bigquery_logging_v1.types.EncryptionInfo): + Result table encryption information. Set when + non-default encryption is used. + statement_type (str): + Type of the statement (e.g. SELECT, INSERT, CREATE_TABLE, + CREATE_MODEL..) + """ + + query = proto.Field( + proto.STRING, + number=1, + ) + destination_table = proto.Field( + proto.MESSAGE, + number=2, + message='TableName', + ) + create_disposition = proto.Field( + proto.STRING, + number=3, + ) + write_disposition = proto.Field( + proto.STRING, + number=4, + ) + default_dataset = proto.Field( + proto.MESSAGE, + number=5, + message='DatasetName', + ) + table_definitions = proto.RepeatedField( + proto.MESSAGE, + number=6, + message='TableDefinition', + ) + query_priority = proto.Field( + proto.STRING, + number=7, + ) + destination_table_encryption = proto.Field( + proto.MESSAGE, + number=8, + message='EncryptionInfo', + ) + statement_type = proto.Field( + proto.STRING, + number=9, + ) + + class Load(proto.Message): + r"""Describes a load job, which loads data from an external + source via the import pipeline. + + Attributes: + source_uris (Sequence[str]): + URIs for the data to be imported. Only Google + Cloud Storage URIs are supported. + schema_json (str): + The table schema in JSON format + representation of a TableSchema. + destination_table (google.cloud.bigquery_logging_v1.types.TableName): + The table where the imported data is written. + create_disposition (str): + Describes when a job is allowed to create a table: + ``CREATE_IF_NEEDED``, ``CREATE_NEVER``. + write_disposition (str): + Describes how writes affect existing tables: + ``WRITE_TRUNCATE``, ``WRITE_APPEND``, ``WRITE_EMPTY``. + destination_table_encryption (google.cloud.bigquery_logging_v1.types.EncryptionInfo): + Result table encryption information. Set when + non-default encryption is used. 
+ """ + + source_uris = proto.RepeatedField( + proto.STRING, + number=1, + ) + schema_json = proto.Field( + proto.STRING, + number=6, + ) + destination_table = proto.Field( + proto.MESSAGE, + number=3, + message='TableName', + ) + create_disposition = proto.Field( + proto.STRING, + number=4, + ) + write_disposition = proto.Field( + proto.STRING, + number=5, + ) + destination_table_encryption = proto.Field( + proto.MESSAGE, + number=7, + message='EncryptionInfo', + ) + + class Extract(proto.Message): + r"""Describes an extract job, which exports data to an external + source via the export pipeline. + + Attributes: + destination_uris (Sequence[str]): + Google Cloud Storage URIs where extracted + data should be written. + source_table (google.cloud.bigquery_logging_v1.types.TableName): + The source table. + """ + + destination_uris = proto.RepeatedField( + proto.STRING, + number=1, + ) + source_table = proto.Field( + proto.MESSAGE, + number=2, + message='TableName', + ) + + class TableCopy(proto.Message): + r"""Describes a copy job, which copies an existing table to + another table. + + Attributes: + source_tables (Sequence[google.cloud.bigquery_logging_v1.types.TableName]): + Source tables. + destination_table (google.cloud.bigquery_logging_v1.types.TableName): + Destination table. + create_disposition (str): + Describes when a job is allowed to create a table: + ``CREATE_IF_NEEDED``, ``CREATE_NEVER``. + write_disposition (str): + Describes how writes affect existing tables: + ``WRITE_TRUNCATE``, ``WRITE_APPEND``, ``WRITE_EMPTY``. + destination_table_encryption (google.cloud.bigquery_logging_v1.types.EncryptionInfo): + Result table encryption information. Set when + non-default encryption is used. + """ + + source_tables = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='TableName', + ) + destination_table = proto.Field( + proto.MESSAGE, + number=2, + message='TableName', + ) + create_disposition = proto.Field( + proto.STRING, + number=3, + ) + write_disposition = proto.Field( + proto.STRING, + number=4, + ) + destination_table_encryption = proto.Field( + proto.MESSAGE, + number=5, + message='EncryptionInfo', + ) + + query = proto.Field( + proto.MESSAGE, + number=5, + oneof='configuration', + message=Query, + ) + load = proto.Field( + proto.MESSAGE, + number=6, + oneof='configuration', + message=Load, + ) + extract = proto.Field( + proto.MESSAGE, + number=7, + oneof='configuration', + message=Extract, + ) + table_copy = proto.Field( + proto.MESSAGE, + number=8, + oneof='configuration', + message=TableCopy, + ) + dry_run = proto.Field( + proto.BOOL, + number=9, + ) + labels = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + + +class TableDefinition(proto.Message): + r"""Describes an external data source used in a query. + Attributes: + name (str): + Name of the table, used in queries. + source_uris (Sequence[str]): + Google Cloud Storage URIs for the data to be + imported. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + source_uris = proto.RepeatedField( + proto.STRING, + number=2, + ) + + +class JobStatus(proto.Message): + r"""Running state of a job. + Attributes: + state (str): + State of a job: ``PENDING``, ``RUNNING``, or ``DONE``. + error (google.rpc.status_pb2.Status): + If the job did not complete successfully, + this field describes why. + additional_errors (Sequence[google.rpc.status_pb2.Status]): + Errors encountered during the running of the + job. Do not necessarily mean that the job has + completed or was unsuccessful. 
+ """ + + state = proto.Field( + proto.STRING, + number=1, + ) + error = proto.Field( + proto.MESSAGE, + number=2, + message=status_pb2.Status, + ) + additional_errors = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=status_pb2.Status, + ) + + +class JobStatistics(proto.Message): + r"""Job statistics that may change after a job starts. + Attributes: + create_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Time when the job ended. + total_processed_bytes (int): + Total bytes processed for a job. + total_billed_bytes (int): + Processed bytes, adjusted by the job's CPU + usage. + billing_tier (int): + The tier assigned by CPU-based billing. + total_slot_ms (int): + The total number of slot-ms consumed by the + query job. + reservation_usage (Sequence[google.cloud.bigquery_logging_v1.types.JobStatistics.ReservationResourceUsage]): + Reservation usage. + referenced_tables (Sequence[google.cloud.bigquery_logging_v1.types.TableName]): + The first N tables accessed by the query job. Older queries + that reference a large number of tables may not have all of + their tables in this list. You can use the + total_tables_processed count to know how many total tables + were read in the query. For new queries, there is currently + no limit. + total_tables_processed (int): + Total number of unique tables referenced in + the query. + referenced_views (Sequence[google.cloud.bigquery_logging_v1.types.TableName]): + The first N views accessed by the query job. Older queries + that reference a large number of views may not have all of + their views in this list. You can use the + total_tables_processed count to know how many total tables + were read in the query. For new queries, there is currently + no limit. + total_views_processed (int): + Total number of unique views referenced in + the query. + query_output_row_count (int): + Number of output rows produced by the query + job. + total_load_output_bytes (int): + Total bytes loaded for an import job. + """ + + class ReservationResourceUsage(proto.Message): + r"""Job resource usage breakdown by reservation. + Attributes: + name (str): + Reservation name or "unreserved" for on- + emand resources usage. + slot_ms (int): + Total slot milliseconds used by the + reservation for a particular job. 
+ """ + + name = proto.Field( + proto.STRING, + number=1, + ) + slot_ms = proto.Field( + proto.INT64, + number=2, + ) + + create_time = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + start_time = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + total_processed_bytes = proto.Field( + proto.INT64, + number=4, + ) + total_billed_bytes = proto.Field( + proto.INT64, + number=5, + ) + billing_tier = proto.Field( + proto.INT32, + number=7, + ) + total_slot_ms = proto.Field( + proto.INT64, + number=8, + ) + reservation_usage = proto.RepeatedField( + proto.MESSAGE, + number=14, + message=ReservationResourceUsage, + ) + referenced_tables = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='TableName', + ) + total_tables_processed = proto.Field( + proto.INT32, + number=10, + ) + referenced_views = proto.RepeatedField( + proto.MESSAGE, + number=11, + message='TableName', + ) + total_views_processed = proto.Field( + proto.INT32, + number=12, + ) + query_output_row_count = proto.Field( + proto.INT64, + number=15, + ) + total_load_output_bytes = proto.Field( + proto.INT64, + number=13, + ) + + +class DatasetName(proto.Message): + r"""The fully-qualified name for a dataset. + Attributes: + project_id (str): + The project ID. + dataset_id (str): + The dataset ID within the project. + """ + + project_id = proto.Field( + proto.STRING, + number=1, + ) + dataset_id = proto.Field( + proto.STRING, + number=2, + ) + + +class TableName(proto.Message): + r"""The fully-qualified name for a table. + Attributes: + project_id (str): + The project ID. + dataset_id (str): + The dataset ID within the project. + table_id (str): + The table ID of the table within the dataset. + """ + + project_id = proto.Field( + proto.STRING, + number=1, + ) + dataset_id = proto.Field( + proto.STRING, + number=2, + ) + table_id = proto.Field( + proto.STRING, + number=3, + ) + + +class JobName(proto.Message): + r"""The fully-qualified name for a job. + Attributes: + project_id (str): + The project ID. + job_id (str): + The job ID within the project. + location (str): + The job location. + """ + + project_id = proto.Field( + proto.STRING, + number=1, + ) + job_id = proto.Field( + proto.STRING, + number=2, + ) + location = proto.Field( + proto.STRING, + number=3, + ) + + +class EncryptionInfo(proto.Message): + r"""Describes encryption properties for a table or a job + Attributes: + kms_key_name (str): + unique identifier for cloud kms key + """ + + kms_key_name = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini new file mode 100644 index 0000000..4505b48 --- /dev/null +++ b/owl-bot-staging/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py new file mode 100644 index 0000000..bcac2b8 --- /dev/null +++ b/owl-bot-staging/v1/noxfile.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini
new file mode 100644
index 0000000..4505b48
--- /dev/null
+++ b/owl-bot-staging/v1/mypy.ini
@@ -0,0 +1,3 @@
+[mypy]
+python_version = 3.6
+namespace_packages = True
diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py
new file mode 100644
index 0000000..bcac2b8
--- /dev/null
+++ b/owl-bot-staging/v1/noxfile.py
@@ -0,0 +1,132 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import pathlib
+import shutil
+import subprocess
+import sys
+
+
+import nox  # type: ignore
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
+PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")
+
+
+nox.sessions = [
+    "unit",
+    "cover",
+    "mypy",
+    "check_lower_bounds",
+    # exclude update_lower_bounds from default
+    "docs",
+]
+
+@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
+def unit(session):
+    """Run the unit test suite."""
+
+    session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
+    session.install('-e', '.')
+
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google/cloud/bigquery_logging_v1/',
+        '--cov-config=.coveragerc',
+        '--cov-report=term',
+        '--cov-report=html',
+        os.path.join('tests', 'unit', ''.join(session.posargs))
+    )
+
+
+@nox.session(python='3.7')
+def cover(session):
+    """Run the final coverage report.
+    This outputs the coverage report aggregating coverage from the unit
+    test runs (not system test runs), and then erases coverage data.
+    """
+    session.install("coverage", "pytest-cov")
+    session.run("coverage", "report", "--show-missing", "--fail-under=100")
+
+    session.run("coverage", "erase")
+
+
+@nox.session(python=['3.6', '3.7'])
+def mypy(session):
+    """Run the type checker."""
+    session.install('mypy', 'types-pkg_resources')
+    session.install('.')
+    session.run(
+        'mypy',
+        '--explicit-package-bases',
+        'google',
+    )
+
+
+@nox.session
+def update_lower_bounds(session):
+    """Update lower bounds in constraints.txt to match setup.py"""
+    session.install('google-cloud-testutils')
+    session.install('.')
+
+    session.run(
+        'lower-bound-checker',
+        'update',
+        '--package-name',
+        PACKAGE_NAME,
+        '--constraints-file',
+        str(LOWER_BOUND_CONSTRAINTS_FILE),
+    )
+
+
+@nox.session
+def check_lower_bounds(session):
+    """Check lower bounds in setup.py are reflected in constraints file"""
+    session.install('google-cloud-testutils')
+    session.install('.')
+
+    session.run(
+        'lower-bound-checker',
+        'check',
+        '--package-name',
+        PACKAGE_NAME,
+        '--constraints-file',
+        str(LOWER_BOUND_CONSTRAINTS_FILE),
+    )
+
+@nox.session(python='3.6')
+def docs(session):
+    """Build the docs for this library."""
+
+    session.install("-e", ".")
+    session.install("sphinx<3.0.0", "alabaster", "recommonmark")
+
+    shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
+    session.run(
+        "sphinx-build",
+        "-W",  # warnings as errors
+        "-T",  # show full traceback on exception
+        "-N",  # no colors
+        "-b",
+        "html",
+        "-d",
+        os.path.join("docs", "_build", "doctrees", ""),
+        os.path.join("docs", ""),
+        os.path.join("docs", "_build", "html", ""),
+    )
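Assuming nox is installed, the sessions defined in this noxfile are invoked by name; the parametrized unit session takes an interpreter suffix:

.. code-block:: console

    nox -s unit-3.8   # unit tests under Python 3.8
    nox -s cover      # aggregate coverage report
    nox -s docs       # Sphinx HTML build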
diff --git a/owl-bot-staging/v1/scripts/fixup_bigquery_logging_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_bigquery_logging_v1_keywords.py
new file mode 100644
index 0000000..4f0341f
--- /dev/null
+++ b/owl-bot-staging/v1/scripts/fixup_bigquery_logging_v1_keywords.py
@@ -0,0 +1,175 @@
+#! /usr/bin/env python3
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import argparse
+import os
+import libcst as cst
+import pathlib
+import sys
+from typing import (Any, Callable, Dict, List, Sequence, Tuple)
+
+
+def partition(
+    predicate: Callable[[Any], bool],
+    iterator: Sequence[Any]
+) -> Tuple[List[Any], List[Any]]:
+    """A stable, out-of-place partition."""
+    results = ([], [])
+
+    for i in iterator:
+        results[int(predicate(i))].append(i)
+
+    # Returns trueList, falseList
+    return results[1], results[0]
+
+
+class bigquery_loggingCallTransformer(cst.CSTTransformer):
+    CTRL_PARAMS: Tuple[str, ...] = ('retry', 'timeout', 'metadata')
+    METHOD_TO_PARAMS: Dict[str, Tuple[str, ...]] = {
+    }
+
+    def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode:
+        try:
+            key = original.func.attr.value
+            kword_params = self.METHOD_TO_PARAMS[key]
+        except (AttributeError, KeyError):
+            # Either not a method from the API or too convoluted to be sure.
+            return updated
+
+        # If the existing code is valid, keyword args come after positional args.
+        # Therefore, all positional args must map to the first parameters.
+        args, kwargs = partition(lambda a: not bool(a.keyword), updated.args)
+        if any(k.keyword.value == "request" for k in kwargs):
+            # We've already fixed this file, don't fix it again.
+            return updated
+
+        kwargs, ctrl_kwargs = partition(
+            lambda a: a.keyword.value not in self.CTRL_PARAMS,
+            kwargs
+        )
+
+        args, ctrl_args = args[:len(kword_params)], args[len(kword_params):]
+        ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl))
+                           for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS))
+
+        request_arg = cst.Arg(
+            value=cst.Dict([
+                cst.DictElement(
+                    cst.SimpleString("'{}'".format(name)),
+                    cst.Element(value=arg.value)
+                )
+                # Note: the args + kwargs looks silly, but keep in mind that
+                # the control parameters had to be stripped out, and that
+                # those could have been passed positionally or by keyword.
+                for name, arg in zip(kword_params, args + kwargs)]),
+            keyword=cst.Name("request")
+        )
+
+        return updated.with_changes(
+            args=[request_arg] + ctrl_kwargs
+        )
+
+
+def fix_files(
+    in_dir: pathlib.Path,
+    out_dir: pathlib.Path,
+    *,
+    transformer=bigquery_loggingCallTransformer(),
+):
+    """Duplicate the input dir to the output dir, fixing file method calls.
+
+    Preconditions:
+    * in_dir is a real directory
+    * out_dir is a real, empty directory
+    """
+    pyfile_gen = (
+        pathlib.Path(os.path.join(root, f))
+        for root, _, files in os.walk(in_dir)
+        for f in files if os.path.splitext(f)[1] == ".py"
+    )
+
+    for fpath in pyfile_gen:
+        with open(fpath, 'r') as f:
+            src = f.read()
+
+        # Parse the code and insert method call fixes.
+        tree = cst.parse_module(src)
+        updated = tree.visit(transformer)
+
+        # Create the path and directory structure for the new file.
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the bigquery_logging client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py new file mode 100644 index 0000000..64273f7 --- /dev/null +++ b/owl-bot-staging/v1/setup.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
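The fixup script above is driven from the command line; a typical invocation (the directory names are illustrative) looks like:

.. code-block:: console

    python scripts/fixup_bigquery_logging_v1_keywords.py \
        --input-directory src/ --output-directory fixed-src/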
+# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-bigquery-logging', + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.cloud'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', + 'libcst >= 0.2.5', + 'proto-plus >= 1.15.0', + 'packaging >= 14.3', 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py new file mode 100644 index 0000000..b54a5fc --- /dev/null +++ b/owl-bot-staging/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py new file mode 100644 index 0000000..b54a5fc --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000..b54a5fc --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
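Note that the noxfile's ``PACKAGE_NAME`` is derived from this setup.py at run time, so the two stay in sync:

.. code-block:: console

    $ python setup.py --name
    google-cloud-bigquery-logging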
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/bigquery_logging_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/bigquery_logging_v1/__init__.py new file mode 100644 index 0000000..b54a5fc --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/bigquery_logging_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# From ea614cb7aa71d00c4635d56561c43658b2889b90 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Tue, 22 Jun 2021 00:31:12 +0000 Subject: [PATCH 2/2] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/master/packages/owl-bot/README.md --- .coveragerc | 1 - owl-bot-staging/v1/.coveragerc | 17 - owl-bot-staging/v1/MANIFEST.in | 2 - owl-bot-staging/v1/README.rst | 49 - .../v1/docs/bigquery_logging_v1/services.rst | 4 - .../v1/docs/bigquery_logging_v1/types.rst | 7 - owl-bot-staging/v1/docs/conf.py | 376 ----- owl-bot-staging/v1/docs/index.rst | 7 - .../google/cloud/bigquery_logging/__init__.py | 89 -- .../v1/google/cloud/bigquery_logging/py.typed | 2 - .../cloud/bigquery_logging_v1/__init__.py | 90 -- .../bigquery_logging_v1/gapic_metadata.json | 7 - .../google/cloud/bigquery_logging_v1/py.typed | 2 - .../bigquery_logging_v1/services/__init__.py | 15 - .../bigquery_logging_v1/types/__init__.py | 90 -- .../bigquery_logging_v1/types/audit_data.py | 1393 ----------------- owl-bot-staging/v1/mypy.ini | 3 - owl-bot-staging/v1/noxfile.py | 132 -- .../fixup_bigquery_logging_v1_keywords.py | 175 --- owl-bot-staging/v1/setup.py | 53 - owl-bot-staging/v1/tests/__init__.py | 16 - owl-bot-staging/v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../gapic/bigquery_logging_v1/__init__.py | 16 - 24 files changed, 2578 deletions(-) delete mode 100644 owl-bot-staging/v1/.coveragerc delete mode 100644 owl-bot-staging/v1/MANIFEST.in delete mode 100644 owl-bot-staging/v1/README.rst delete mode 100644 owl-bot-staging/v1/docs/bigquery_logging_v1/services.rst delete mode 100644 owl-bot-staging/v1/docs/bigquery_logging_v1/types.rst delete mode 100644 owl-bot-staging/v1/docs/conf.py delete mode 100644 owl-bot-staging/v1/docs/index.rst delete mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging_v1/__init__.py delete mode 100644 
owl-bot-staging/v1/google/cloud/bigquery_logging_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging_v1/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging_v1/services/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/audit_data.py delete mode 100644 owl-bot-staging/v1/mypy.ini delete mode 100644 owl-bot-staging/v1/noxfile.py delete mode 100644 owl-bot-staging/v1/scripts/fixup_bigquery_logging_v1_keywords.py delete mode 100644 owl-bot-staging/v1/setup.py delete mode 100644 owl-bot-staging/v1/tests/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/bigquery_logging_v1/__init__.py diff --git a/.coveragerc b/.coveragerc index ae7a80a..2ae3ab8 100644 --- a/.coveragerc +++ b/.coveragerc @@ -2,7 +2,6 @@ branch = True [report] -fail_under = 100 show_missing = True omit = google/cloud/bigquery_logging/__init__.py diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc deleted file mode 100644 index 2ae3ab8..0000000 --- a/owl-bot-staging/v1/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/bigquery_logging/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. - except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in deleted file mode 100644 index e3b1309..0000000 --- a/owl-bot-staging/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/bigquery_logging *.py -recursive-include google/cloud/bigquery_logging_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst deleted file mode 100644 index 223a1f6..0000000 --- a/owl-bot-staging/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Bigquery Logging API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Bigquery Logging API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/bigquery_logging_v1/services.rst b/owl-bot-staging/v1/docs/bigquery_logging_v1/services.rst deleted file mode 100644 index b54c5c1..0000000 --- a/owl-bot-staging/v1/docs/bigquery_logging_v1/services.rst +++ /dev/null @@ -1,4 +0,0 @@ -Services for Google Cloud Bigquery Logging v1 API -================================================= -.. toctree:: - :maxdepth: 2 diff --git a/owl-bot-staging/v1/docs/bigquery_logging_v1/types.rst b/owl-bot-staging/v1/docs/bigquery_logging_v1/types.rst deleted file mode 100644 index bba03ca..0000000 --- a/owl-bot-staging/v1/docs/bigquery_logging_v1/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Cloud Bigquery Logging v1 API -============================================== - -.. automodule:: google.cloud.bigquery_logging_v1.types - :members: - :undoc-members: - :show-inheritance: diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py deleted file mode 100644 index 9718c40..0000000 --- a/owl-bot-staging/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-bigquery-logging documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. 
-templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-bigquery-logging" -copyright = u"2020, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. 
Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-bigquery-logging-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). 
- # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-bigquery-logging.tex", - u"google-cloud-bigquery-logging Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-bigquery-logging", - u"Google Cloud Bigquery Logging Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-bigquery-logging", - u"google-cloud-bigquery-logging Documentation", - author, - "google-cloud-bigquery-logging", - "GAPIC library for Google Cloud Bigquery Logging API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst deleted file mode 100644 index e3f5c51..0000000 --- a/owl-bot-staging/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - bigquery_logging_v1/services - bigquery_logging_v1/types diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging/__init__.py b/owl-bot-staging/v1/google/cloud/bigquery_logging/__init__.py deleted file mode 100644 index 01793a6..0000000 --- a/owl-bot-staging/v1/google/cloud/bigquery_logging/__init__.py +++ /dev/null @@ -1,89 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - - -from google.cloud.bigquery_logging_v1.types.audit_data import AuditData -from google.cloud.bigquery_logging_v1.types.audit_data import BigQueryAcl -from google.cloud.bigquery_logging_v1.types.audit_data import Dataset -from google.cloud.bigquery_logging_v1.types.audit_data import DatasetInfo -from google.cloud.bigquery_logging_v1.types.audit_data import DatasetInsertRequest -from google.cloud.bigquery_logging_v1.types.audit_data import DatasetInsertResponse -from google.cloud.bigquery_logging_v1.types.audit_data import DatasetListRequest -from google.cloud.bigquery_logging_v1.types.audit_data import DatasetName -from google.cloud.bigquery_logging_v1.types.audit_data import DatasetUpdateRequest -from google.cloud.bigquery_logging_v1.types.audit_data import DatasetUpdateResponse -from google.cloud.bigquery_logging_v1.types.audit_data import EncryptionInfo -from google.cloud.bigquery_logging_v1.types.audit_data import Job -from google.cloud.bigquery_logging_v1.types.audit_data import JobCompletedEvent -from google.cloud.bigquery_logging_v1.types.audit_data import JobConfiguration -from google.cloud.bigquery_logging_v1.types.audit_data import JobGetQueryResultsRequest -from google.cloud.bigquery_logging_v1.types.audit_data import JobGetQueryResultsResponse -from google.cloud.bigquery_logging_v1.types.audit_data import JobInsertRequest -from google.cloud.bigquery_logging_v1.types.audit_data import JobInsertResponse -from google.cloud.bigquery_logging_v1.types.audit_data import JobName -from google.cloud.bigquery_logging_v1.types.audit_data import JobQueryDoneResponse -from google.cloud.bigquery_logging_v1.types.audit_data import JobQueryRequest -from google.cloud.bigquery_logging_v1.types.audit_data import JobQueryResponse -from google.cloud.bigquery_logging_v1.types.audit_data import JobStatistics -from google.cloud.bigquery_logging_v1.types.audit_data import JobStatus -from google.cloud.bigquery_logging_v1.types.audit_data import Table -from google.cloud.bigquery_logging_v1.types.audit_data import TableDataListRequest -from google.cloud.bigquery_logging_v1.types.audit_data import TableDataReadEvent -from google.cloud.bigquery_logging_v1.types.audit_data import TableDefinition -from google.cloud.bigquery_logging_v1.types.audit_data import TableInfo -from google.cloud.bigquery_logging_v1.types.audit_data import TableInsertRequest -from google.cloud.bigquery_logging_v1.types.audit_data import TableInsertResponse -from google.cloud.bigquery_logging_v1.types.audit_data import TableName -from google.cloud.bigquery_logging_v1.types.audit_data import TableUpdateRequest -from google.cloud.bigquery_logging_v1.types.audit_data import TableUpdateResponse -from google.cloud.bigquery_logging_v1.types.audit_data import TableViewDefinition - -__all__ = ('AuditData', - 'BigQueryAcl', - 'Dataset', - 'DatasetInfo', - 'DatasetInsertRequest', - 'DatasetInsertResponse', - 'DatasetListRequest', - 'DatasetName', - 'DatasetUpdateRequest', - 'DatasetUpdateResponse', - 'EncryptionInfo', - 'Job', - 'JobCompletedEvent', - 'JobConfiguration', - 'JobGetQueryResultsRequest', - 'JobGetQueryResultsResponse', - 'JobInsertRequest', - 'JobInsertResponse', - 'JobName', - 'JobQueryDoneResponse', - 'JobQueryRequest', - 'JobQueryResponse', - 'JobStatistics', - 'JobStatus', - 'Table', - 'TableDataListRequest', - 'TableDataReadEvent', - 'TableDefinition', - 'TableInfo', - 'TableInsertRequest', - 'TableInsertResponse', - 'TableName', - 'TableUpdateRequest', - 'TableUpdateResponse', - 'TableViewDefinition', -) diff --git 
a/owl-bot-staging/v1/google/cloud/bigquery_logging/py.typed b/owl-bot-staging/v1/google/cloud/bigquery_logging/py.typed deleted file mode 100644 index 8cff817..0000000 --- a/owl-bot-staging/v1/google/cloud/bigquery_logging/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-logging package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/__init__.py b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/__init__.py deleted file mode 100644 index 2f721d4..0000000 --- a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/__init__.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - - -from .types.audit_data import AuditData -from .types.audit_data import BigQueryAcl -from .types.audit_data import Dataset -from .types.audit_data import DatasetInfo -from .types.audit_data import DatasetInsertRequest -from .types.audit_data import DatasetInsertResponse -from .types.audit_data import DatasetListRequest -from .types.audit_data import DatasetName -from .types.audit_data import DatasetUpdateRequest -from .types.audit_data import DatasetUpdateResponse -from .types.audit_data import EncryptionInfo -from .types.audit_data import Job -from .types.audit_data import JobCompletedEvent -from .types.audit_data import JobConfiguration -from .types.audit_data import JobGetQueryResultsRequest -from .types.audit_data import JobGetQueryResultsResponse -from .types.audit_data import JobInsertRequest -from .types.audit_data import JobInsertResponse -from .types.audit_data import JobName -from .types.audit_data import JobQueryDoneResponse -from .types.audit_data import JobQueryRequest -from .types.audit_data import JobQueryResponse -from .types.audit_data import JobStatistics -from .types.audit_data import JobStatus -from .types.audit_data import Table -from .types.audit_data import TableDataListRequest -from .types.audit_data import TableDataReadEvent -from .types.audit_data import TableDefinition -from .types.audit_data import TableInfo -from .types.audit_data import TableInsertRequest -from .types.audit_data import TableInsertResponse -from .types.audit_data import TableName -from .types.audit_data import TableUpdateRequest -from .types.audit_data import TableUpdateResponse -from .types.audit_data import TableViewDefinition - -__all__ = ( -'AuditData', -'BigQueryAcl', -'Dataset', -'DatasetInfo', -'DatasetInsertRequest', -'DatasetInsertResponse', -'DatasetListRequest', -'DatasetName', -'DatasetUpdateRequest', -'DatasetUpdateResponse', -'EncryptionInfo', -'Job', -'JobCompletedEvent', -'JobConfiguration', -'JobGetQueryResultsRequest', -'JobGetQueryResultsResponse', -'JobInsertRequest', -'JobInsertResponse', -'JobName', -'JobQueryDoneResponse', -'JobQueryRequest', -'JobQueryResponse', -'JobStatistics', -'JobStatus', -'Table', -'TableDataListRequest', -'TableDataReadEvent', -'TableDefinition', -'TableInfo', -'TableInsertRequest', -'TableInsertResponse', 
-'TableName', -'TableUpdateRequest', -'TableUpdateResponse', -'TableViewDefinition', -) diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/gapic_metadata.json deleted file mode 100644 index 5c730dc..0000000 --- a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/gapic_metadata.json +++ /dev/null @@ -1,7 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.bigquery_logging_v1", - "protoPackage": "google.cloud.bigquery.logging.v1", - "schema": "1.0" -} diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/py.typed b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/py.typed deleted file mode 100644 index 8cff817..0000000 --- a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-bigquery-logging package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/services/__init__.py deleted file mode 100644 index 4de6597..0000000 --- a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/__init__.py deleted file mode 100644 index 19d6d96..0000000 --- a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/__init__.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .audit_data import ( - AuditData, - BigQueryAcl, - Dataset, - DatasetInfo, - DatasetInsertRequest, - DatasetInsertResponse, - DatasetListRequest, - DatasetName, - DatasetUpdateRequest, - DatasetUpdateResponse, - EncryptionInfo, - Job, - JobCompletedEvent, - JobConfiguration, - JobGetQueryResultsRequest, - JobGetQueryResultsResponse, - JobInsertRequest, - JobInsertResponse, - JobName, - JobQueryDoneResponse, - JobQueryRequest, - JobQueryResponse, - JobStatistics, - JobStatus, - Table, - TableDataListRequest, - TableDataReadEvent, - TableDefinition, - TableInfo, - TableInsertRequest, - TableInsertResponse, - TableName, - TableUpdateRequest, - TableUpdateResponse, - TableViewDefinition, -) - -__all__ = ( - 'AuditData', - 'BigQueryAcl', - 'Dataset', - 'DatasetInfo', - 'DatasetInsertRequest', - 'DatasetInsertResponse', - 'DatasetListRequest', - 'DatasetName', - 'DatasetUpdateRequest', - 'DatasetUpdateResponse', - 'EncryptionInfo', - 'Job', - 'JobCompletedEvent', - 'JobConfiguration', - 'JobGetQueryResultsRequest', - 'JobGetQueryResultsResponse', - 'JobInsertRequest', - 'JobInsertResponse', - 'JobName', - 'JobQueryDoneResponse', - 'JobQueryRequest', - 'JobQueryResponse', - 'JobStatistics', - 'JobStatus', - 'Table', - 'TableDataListRequest', - 'TableDataReadEvent', - 'TableDefinition', - 'TableInfo', - 'TableInsertRequest', - 'TableInsertResponse', - 'TableName', - 'TableUpdateRequest', - 'TableUpdateResponse', - 'TableViewDefinition', -) diff --git a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/audit_data.py b/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/audit_data.py deleted file mode 100644 index 6c14bdb..0000000 --- a/owl-bot-staging/v1/google/cloud/bigquery_logging_v1/types/audit_data.py +++ /dev/null @@ -1,1393 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import proto # type: ignore - -from google.iam.v1 import iam_policy_pb2 # type: ignore -from google.iam.v1 import policy_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.bigquery.logging.v1', - manifest={ - 'AuditData', - 'TableInsertRequest', - 'TableUpdateRequest', - 'TableInsertResponse', - 'TableUpdateResponse', - 'DatasetListRequest', - 'DatasetInsertRequest', - 'DatasetInsertResponse', - 'DatasetUpdateRequest', - 'DatasetUpdateResponse', - 'JobInsertRequest', - 'JobInsertResponse', - 'JobQueryRequest', - 'JobQueryResponse', - 'JobGetQueryResultsRequest', - 'JobGetQueryResultsResponse', - 'JobQueryDoneResponse', - 'JobCompletedEvent', - 'TableDataReadEvent', - 'TableDataListRequest', - 'Table', - 'TableInfo', - 'TableViewDefinition', - 'Dataset', - 'DatasetInfo', - 'BigQueryAcl', - 'Job', - 'JobConfiguration', - 'TableDefinition', - 'JobStatus', - 'JobStatistics', - 'DatasetName', - 'TableName', - 'JobName', - 'EncryptionInfo', - }, -) - - -class AuditData(proto.Message): - r"""BigQuery request and response messages for audit log. Note: - ``Table.schema`` has been deprecated in favor of - ``Table.schemaJson``. ``Table.schema`` may continue to be present in - your logs during this transition. - - Attributes: - table_insert_request (google.cloud.bigquery_logging_v1.types.TableInsertRequest): - Table insert request. - table_update_request (google.cloud.bigquery_logging_v1.types.TableUpdateRequest): - Table update request. - dataset_list_request (google.cloud.bigquery_logging_v1.types.DatasetListRequest): - Dataset list request. - dataset_insert_request (google.cloud.bigquery_logging_v1.types.DatasetInsertRequest): - Dataset insert request. - dataset_update_request (google.cloud.bigquery_logging_v1.types.DatasetUpdateRequest): - Dataset update request. - job_insert_request (google.cloud.bigquery_logging_v1.types.JobInsertRequest): - Job insert request. - job_query_request (google.cloud.bigquery_logging_v1.types.JobQueryRequest): - Job query request. - job_get_query_results_request (google.cloud.bigquery_logging_v1.types.JobGetQueryResultsRequest): - Job get query results request. - table_data_list_request (google.cloud.bigquery_logging_v1.types.TableDataListRequest): - Table data-list request. - set_iam_policy_request (google.iam.v1.iam_policy_pb2.SetIamPolicyRequest): - Iam policy request. - table_insert_response (google.cloud.bigquery_logging_v1.types.TableInsertResponse): - Table insert response. - table_update_response (google.cloud.bigquery_logging_v1.types.TableUpdateResponse): - Table update response. - dataset_insert_response (google.cloud.bigquery_logging_v1.types.DatasetInsertResponse): - Dataset insert response. - dataset_update_response (google.cloud.bigquery_logging_v1.types.DatasetUpdateResponse): - Dataset update response. - job_insert_response (google.cloud.bigquery_logging_v1.types.JobInsertResponse): - Job insert response. - job_query_response (google.cloud.bigquery_logging_v1.types.JobQueryResponse): - Job query response. - job_get_query_results_response (google.cloud.bigquery_logging_v1.types.JobGetQueryResultsResponse): - Job get query results response. - job_query_done_response (google.cloud.bigquery_logging_v1.types.JobQueryDoneResponse): - Deprecated: Job query-done response. Use this - information for usage analysis. 
- policy_response (google.iam.v1.policy_pb2.Policy): - Iam Policy. - job_completed_event (google.cloud.bigquery_logging_v1.types.JobCompletedEvent): - A job completion event. - table_data_read_events (Sequence[google.cloud.bigquery_logging_v1.types.TableDataReadEvent]): - Information about the table access events. - """ - - table_insert_request = proto.Field( - proto.MESSAGE, - number=1, - oneof='request', - message='TableInsertRequest', - ) - table_update_request = proto.Field( - proto.MESSAGE, - number=16, - oneof='request', - message='TableUpdateRequest', - ) - dataset_list_request = proto.Field( - proto.MESSAGE, - number=2, - oneof='request', - message='DatasetListRequest', - ) - dataset_insert_request = proto.Field( - proto.MESSAGE, - number=3, - oneof='request', - message='DatasetInsertRequest', - ) - dataset_update_request = proto.Field( - proto.MESSAGE, - number=4, - oneof='request', - message='DatasetUpdateRequest', - ) - job_insert_request = proto.Field( - proto.MESSAGE, - number=5, - oneof='request', - message='JobInsertRequest', - ) - job_query_request = proto.Field( - proto.MESSAGE, - number=6, - oneof='request', - message='JobQueryRequest', - ) - job_get_query_results_request = proto.Field( - proto.MESSAGE, - number=7, - oneof='request', - message='JobGetQueryResultsRequest', - ) - table_data_list_request = proto.Field( - proto.MESSAGE, - number=8, - oneof='request', - message='TableDataListRequest', - ) - set_iam_policy_request = proto.Field( - proto.MESSAGE, - number=20, - oneof='request', - message=iam_policy_pb2.SetIamPolicyRequest, - ) - table_insert_response = proto.Field( - proto.MESSAGE, - number=9, - oneof='response', - message='TableInsertResponse', - ) - table_update_response = proto.Field( - proto.MESSAGE, - number=10, - oneof='response', - message='TableUpdateResponse', - ) - dataset_insert_response = proto.Field( - proto.MESSAGE, - number=11, - oneof='response', - message='DatasetInsertResponse', - ) - dataset_update_response = proto.Field( - proto.MESSAGE, - number=12, - oneof='response', - message='DatasetUpdateResponse', - ) - job_insert_response = proto.Field( - proto.MESSAGE, - number=18, - oneof='response', - message='JobInsertResponse', - ) - job_query_response = proto.Field( - proto.MESSAGE, - number=13, - oneof='response', - message='JobQueryResponse', - ) - job_get_query_results_response = proto.Field( - proto.MESSAGE, - number=14, - oneof='response', - message='JobGetQueryResultsResponse', - ) - job_query_done_response = proto.Field( - proto.MESSAGE, - number=15, - oneof='response', - message='JobQueryDoneResponse', - ) - policy_response = proto.Field( - proto.MESSAGE, - number=21, - oneof='response', - message=policy_pb2.Policy, - ) - job_completed_event = proto.Field( - proto.MESSAGE, - number=17, - message='JobCompletedEvent', - ) - table_data_read_events = proto.RepeatedField( - proto.MESSAGE, - number=19, - message='TableDataReadEvent', - ) - - -class TableInsertRequest(proto.Message): - r"""Table insert request. - Attributes: - resource (google.cloud.bigquery_logging_v1.types.Table): - The new table. - """ - - resource = proto.Field( - proto.MESSAGE, - number=1, - message='Table', - ) - - -class TableUpdateRequest(proto.Message): - r"""Table update request. - Attributes: - resource (google.cloud.bigquery_logging_v1.types.Table): - The table to be updated. - """ - - resource = proto.Field( - proto.MESSAGE, - number=1, - message='Table', - ) - - -class TableInsertResponse(proto.Message): - r"""Table insert response. 
- Attributes: - resource (google.cloud.bigquery_logging_v1.types.Table): - Final state of the inserted table. - """ - - resource = proto.Field( - proto.MESSAGE, - number=1, - message='Table', - ) - - -class TableUpdateResponse(proto.Message): - r"""Table update response. - Attributes: - resource (google.cloud.bigquery_logging_v1.types.Table): - Final state of the updated table. - """ - - resource = proto.Field( - proto.MESSAGE, - number=1, - message='Table', - ) - - -class DatasetListRequest(proto.Message): - r"""Dataset list request. - Attributes: - list_all (bool): - Whether to list all datasets, including - hidden ones. - """ - - list_all = proto.Field( - proto.BOOL, - number=1, - ) - - -class DatasetInsertRequest(proto.Message): - r"""Dataset insert request. - Attributes: - resource (google.cloud.bigquery_logging_v1.types.Dataset): - The dataset to be inserted. - """ - - resource = proto.Field( - proto.MESSAGE, - number=1, - message='Dataset', - ) - - -class DatasetInsertResponse(proto.Message): - r"""Dataset insert response. - Attributes: - resource (google.cloud.bigquery_logging_v1.types.Dataset): - Final state of the inserted dataset. - """ - - resource = proto.Field( - proto.MESSAGE, - number=1, - message='Dataset', - ) - - -class DatasetUpdateRequest(proto.Message): - r"""Dataset update request. - Attributes: - resource (google.cloud.bigquery_logging_v1.types.Dataset): - The dataset to be updated. - """ - - resource = proto.Field( - proto.MESSAGE, - number=1, - message='Dataset', - ) - - -class DatasetUpdateResponse(proto.Message): - r"""Dataset update response. - Attributes: - resource (google.cloud.bigquery_logging_v1.types.Dataset): - Final state of the updated dataset. - """ - - resource = proto.Field( - proto.MESSAGE, - number=1, - message='Dataset', - ) - - -class JobInsertRequest(proto.Message): - r"""Job insert request. - Attributes: - resource (google.cloud.bigquery_logging_v1.types.Job): - Job insert request. - """ - - resource = proto.Field( - proto.MESSAGE, - number=1, - message='Job', - ) - - -class JobInsertResponse(proto.Message): - r"""Job insert response. - Attributes: - resource (google.cloud.bigquery_logging_v1.types.Job): - Job insert response. - """ - - resource = proto.Field( - proto.MESSAGE, - number=1, - message='Job', - ) - - -class JobQueryRequest(proto.Message): - r"""Job query request. - Attributes: - query (str): - The query. - max_results (int): - The maximum number of results. - default_dataset (google.cloud.bigquery_logging_v1.types.DatasetName): - The default dataset for tables that do not - have a dataset specified. - project_id (str): - Project that the query should be charged to. - dry_run (bool): - If true, don't actually run the job. Just - check that it would run. - """ - - query = proto.Field( - proto.STRING, - number=1, - ) - max_results = proto.Field( - proto.UINT32, - number=2, - ) - default_dataset = proto.Field( - proto.MESSAGE, - number=3, - message='DatasetName', - ) - project_id = proto.Field( - proto.STRING, - number=4, - ) - dry_run = proto.Field( - proto.BOOL, - number=5, - ) - - -class JobQueryResponse(proto.Message): - r"""Job query response. - Attributes: - total_results (int): - The total number of rows in the full query - result set. - job (google.cloud.bigquery_logging_v1.types.Job): - Information about the queried job. 
- """ - - total_results = proto.Field( - proto.UINT64, - number=1, - ) - job = proto.Field( - proto.MESSAGE, - number=2, - message='Job', - ) - - -class JobGetQueryResultsRequest(proto.Message): - r"""Job getQueryResults request. - Attributes: - max_results (int): - Maximum number of results to return. - start_row (int): - Zero-based row number at which to start. - """ - - max_results = proto.Field( - proto.UINT32, - number=1, - ) - start_row = proto.Field( - proto.UINT64, - number=2, - ) - - -class JobGetQueryResultsResponse(proto.Message): - r"""Job getQueryResults response. - Attributes: - total_results (int): - Total number of results in query results. - job (google.cloud.bigquery_logging_v1.types.Job): - The job that was created to run the query. It completed if - ``job.status.state`` is ``DONE``. It failed if - ``job.status.errorResult`` is also present. - """ - - total_results = proto.Field( - proto.UINT64, - number=1, - ) - job = proto.Field( - proto.MESSAGE, - number=2, - message='Job', - ) - - -class JobQueryDoneResponse(proto.Message): - r"""Job getQueryDone response. - Attributes: - job (google.cloud.bigquery_logging_v1.types.Job): - The job and status information. The job completed if - ``job.status.state`` is ``DONE``. - """ - - job = proto.Field( - proto.MESSAGE, - number=1, - message='Job', - ) - - -class JobCompletedEvent(proto.Message): - r"""Query job completed event. - Attributes: - event_name (str): - Name of the event. - job (google.cloud.bigquery_logging_v1.types.Job): - Job information. - """ - - event_name = proto.Field( - proto.STRING, - number=1, - ) - job = proto.Field( - proto.MESSAGE, - number=2, - message='Job', - ) - - -class TableDataReadEvent(proto.Message): - r"""Table data read event. Only present for tables, not views, - and is only included in the log record for the project that owns - the table. - - Attributes: - table_name (google.cloud.bigquery_logging_v1.types.TableName): - Name of the accessed table. - referenced_fields (Sequence[str]): - A list of referenced fields. This information - is not included by default. To enable this in - the logs, please contact BigQuery support or - open a bug in the BigQuery issue tracker. - """ - - table_name = proto.Field( - proto.MESSAGE, - number=1, - message='TableName', - ) - referenced_fields = proto.RepeatedField( - proto.STRING, - number=2, - ) - - -class TableDataListRequest(proto.Message): - r"""Table data-list request. - Attributes: - start_row (int): - Starting row offset. - max_results (int): - Maximum number of results to return. - """ - - start_row = proto.Field( - proto.UINT64, - number=1, - ) - max_results = proto.Field( - proto.UINT32, - number=2, - ) - - -class Table(proto.Message): - r"""Describes a BigQuery table. See the - `Table `__ API resource for more - details on individual fields. Note: ``Table.schema`` has been - deprecated in favor of ``Table.schemaJson``. ``Table.schema`` may - continue to be present in your logs during this transition. - - Attributes: - table_name (google.cloud.bigquery_logging_v1.types.TableName): - The name of the table. - info (google.cloud.bigquery_logging_v1.types.TableInfo): - User-provided metadata for the table. - schema_json (str): - A JSON representation of the table's schema. - view (google.cloud.bigquery_logging_v1.types.TableViewDefinition): - If present, this is a virtual table defined - by a SQL query. 
- expire_time (google.protobuf.timestamp_pb2.Timestamp): - The expiration date for the table, after - which the table is deleted and the storage - reclaimed. If not present, the table persists - indefinitely. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time the table was created. - truncate_time (google.protobuf.timestamp_pb2.Timestamp): - The time the table was last truncated by an operation with a - ``writeDisposition`` of ``WRITE_TRUNCATE``. - update_time (google.protobuf.timestamp_pb2.Timestamp): - The time the table was last modified. - encryption (google.cloud.bigquery_logging_v1.types.EncryptionInfo): - The table encryption information. Set when - non-default encryption is used. - """ - - table_name = proto.Field( - proto.MESSAGE, - number=1, - message='TableName', - ) - info = proto.Field( - proto.MESSAGE, - number=2, - message='TableInfo', - ) - schema_json = proto.Field( - proto.STRING, - number=8, - ) - view = proto.Field( - proto.MESSAGE, - number=4, - message='TableViewDefinition', - ) - expire_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - create_time = proto.Field( - proto.MESSAGE, - number=6, - message=timestamp_pb2.Timestamp, - ) - truncate_time = proto.Field( - proto.MESSAGE, - number=7, - message=timestamp_pb2.Timestamp, - ) - update_time = proto.Field( - proto.MESSAGE, - number=9, - message=timestamp_pb2.Timestamp, - ) - encryption = proto.Field( - proto.MESSAGE, - number=10, - message='EncryptionInfo', - ) - - -class TableInfo(proto.Message): - r"""User-provided metadata for a table. - Attributes: - friendly_name (str): - A short name for the table, such - as\ ``"Analytics Data - Jan 2011"``. - description (str): - A long description, perhaps several - paragraphs, describing the table contents in - detail. - labels (Sequence[google.cloud.bigquery_logging_v1.types.TableInfo.LabelsEntry]): - Labels provided for the table. - """ - - friendly_name = proto.Field( - proto.STRING, - number=1, - ) - description = proto.Field( - proto.STRING, - number=2, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - - -class TableViewDefinition(proto.Message): - r"""Describes a virtual table defined by a SQL query. - Attributes: - query (str): - SQL query defining the view. - """ - - query = proto.Field( - proto.STRING, - number=1, - ) - - -class Dataset(proto.Message): - r"""BigQuery dataset information. See the - `Dataset `__ API resource for - more details on individual fields. - - Attributes: - dataset_name (google.cloud.bigquery_logging_v1.types.DatasetName): - The name of the dataset. - info (google.cloud.bigquery_logging_v1.types.DatasetInfo): - User-provided metadata for the dataset. - create_time (google.protobuf.timestamp_pb2.Timestamp): - The time the dataset was created. - update_time (google.protobuf.timestamp_pb2.Timestamp): - The time the dataset was last modified. - acl (google.cloud.bigquery_logging_v1.types.BigQueryAcl): - The access control list for the dataset. - default_table_expire_duration (google.protobuf.duration_pb2.Duration): - If this field is present, each table that does not specify - an expiration time is assigned an expiration time by adding - this duration to the table's ``createTime``. If this field - is empty, there is no default table expiration time. 
- """ - - dataset_name = proto.Field( - proto.MESSAGE, - number=1, - message='DatasetName', - ) - info = proto.Field( - proto.MESSAGE, - number=2, - message='DatasetInfo', - ) - create_time = proto.Field( - proto.MESSAGE, - number=4, - message=timestamp_pb2.Timestamp, - ) - update_time = proto.Field( - proto.MESSAGE, - number=5, - message=timestamp_pb2.Timestamp, - ) - acl = proto.Field( - proto.MESSAGE, - number=6, - message='BigQueryAcl', - ) - default_table_expire_duration = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - - -class DatasetInfo(proto.Message): - r"""User-provided metadata for a dataset. - Attributes: - friendly_name (str): - A short name for the dataset, such - as\ ``"Analytics Data 2011"``. - description (str): - A long description, perhaps several - paragraphs, describing the dataset contents in - detail. - labels (Sequence[google.cloud.bigquery_logging_v1.types.DatasetInfo.LabelsEntry]): - Labels provided for the dataset. - """ - - friendly_name = proto.Field( - proto.STRING, - number=1, - ) - description = proto.Field( - proto.STRING, - number=2, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - - -class BigQueryAcl(proto.Message): - r"""An access control list. - Attributes: - entries (Sequence[google.cloud.bigquery_logging_v1.types.BigQueryAcl.Entry]): - Access control entry list. - """ - - class Entry(proto.Message): - r"""Access control entry. - Attributes: - role (str): - The granted role, which can be ``READER``, ``WRITER``, or - ``OWNER``. - group_email (str): - Grants access to a group identified by an - email address. - user_email (str): - Grants access to a user identified by an - email address. - domain (str): - Grants access to all members of a domain. - special_group (str): - Grants access to special groups. Valid groups are - ``PROJECT_OWNERS``, ``PROJECT_READERS``, ``PROJECT_WRITERS`` - and ``ALL_AUTHENTICATED_USERS``. - view_name (google.cloud.bigquery_logging_v1.types.TableName): - Grants access to a BigQuery View. - """ - - role = proto.Field( - proto.STRING, - number=1, - ) - group_email = proto.Field( - proto.STRING, - number=2, - ) - user_email = proto.Field( - proto.STRING, - number=3, - ) - domain = proto.Field( - proto.STRING, - number=4, - ) - special_group = proto.Field( - proto.STRING, - number=5, - ) - view_name = proto.Field( - proto.MESSAGE, - number=6, - message='TableName', - ) - - entries = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=Entry, - ) - - -class Job(proto.Message): - r"""Describes a job. - Attributes: - job_name (google.cloud.bigquery_logging_v1.types.JobName): - Job name. - job_configuration (google.cloud.bigquery_logging_v1.types.JobConfiguration): - Job configuration. - job_status (google.cloud.bigquery_logging_v1.types.JobStatus): - Job status. - job_statistics (google.cloud.bigquery_logging_v1.types.JobStatistics): - Job statistics. - """ - - job_name = proto.Field( - proto.MESSAGE, - number=1, - message='JobName', - ) - job_configuration = proto.Field( - proto.MESSAGE, - number=2, - message='JobConfiguration', - ) - job_status = proto.Field( - proto.MESSAGE, - number=3, - message='JobStatus', - ) - job_statistics = proto.Field( - proto.MESSAGE, - number=4, - message='JobStatistics', - ) - - -class JobConfiguration(proto.Message): - r"""Job configuration information. See the - `Jobs `__ API resource for more - details on individual fields. 
- - Attributes: - query (google.cloud.bigquery_logging_v1.types.JobConfiguration.Query): - Query job information. - load (google.cloud.bigquery_logging_v1.types.JobConfiguration.Load): - Load job information. - extract (google.cloud.bigquery_logging_v1.types.JobConfiguration.Extract): - Extract job information. - table_copy (google.cloud.bigquery_logging_v1.types.JobConfiguration.TableCopy): - TableCopy job information. - dry_run (bool): - If true, don't actually run the job. Just - check that it would run. - labels (Sequence[google.cloud.bigquery_logging_v1.types.JobConfiguration.LabelsEntry]): - Labels provided for the job. - """ - - class Query(proto.Message): - r"""Describes a query job, which executes a SQL-like query. - Attributes: - query (str): - The SQL query to run. - destination_table (google.cloud.bigquery_logging_v1.types.TableName): - The table where results are written. - create_disposition (str): - Describes when a job is allowed to create a table: - ``CREATE_IF_NEEDED``, ``CREATE_NEVER``. - write_disposition (str): - Describes how writes affect existing tables: - ``WRITE_TRUNCATE``, ``WRITE_APPEND``, ``WRITE_EMPTY``. - default_dataset (google.cloud.bigquery_logging_v1.types.DatasetName): - If a table name is specified without a - dataset in a query, this dataset will be added - to table name. - table_definitions (Sequence[google.cloud.bigquery_logging_v1.types.TableDefinition]): - Describes data sources outside BigQuery, if - needed. - query_priority (str): - Describes the priority given to the query: - ``QUERY_INTERACTIVE`` or ``QUERY_BATCH``. - destination_table_encryption (google.cloud.bigquery_logging_v1.types.EncryptionInfo): - Result table encryption information. Set when - non-default encryption is used. - statement_type (str): - Type of the statement (e.g. SELECT, INSERT, CREATE_TABLE, - CREATE_MODEL..) - """ - - query = proto.Field( - proto.STRING, - number=1, - ) - destination_table = proto.Field( - proto.MESSAGE, - number=2, - message='TableName', - ) - create_disposition = proto.Field( - proto.STRING, - number=3, - ) - write_disposition = proto.Field( - proto.STRING, - number=4, - ) - default_dataset = proto.Field( - proto.MESSAGE, - number=5, - message='DatasetName', - ) - table_definitions = proto.RepeatedField( - proto.MESSAGE, - number=6, - message='TableDefinition', - ) - query_priority = proto.Field( - proto.STRING, - number=7, - ) - destination_table_encryption = proto.Field( - proto.MESSAGE, - number=8, - message='EncryptionInfo', - ) - statement_type = proto.Field( - proto.STRING, - number=9, - ) - - class Load(proto.Message): - r"""Describes a load job, which loads data from an external - source via the import pipeline. - - Attributes: - source_uris (Sequence[str]): - URIs for the data to be imported. Only Google - Cloud Storage URIs are supported. - schema_json (str): - The table schema in JSON format - representation of a TableSchema. - destination_table (google.cloud.bigquery_logging_v1.types.TableName): - The table where the imported data is written. - create_disposition (str): - Describes when a job is allowed to create a table: - ``CREATE_IF_NEEDED``, ``CREATE_NEVER``. - write_disposition (str): - Describes how writes affect existing tables: - ``WRITE_TRUNCATE``, ``WRITE_APPEND``, ``WRITE_EMPTY``. - destination_table_encryption (google.cloud.bigquery_logging_v1.types.EncryptionInfo): - Result table encryption information. Set when - non-default encryption is used. 
- """ - - source_uris = proto.RepeatedField( - proto.STRING, - number=1, - ) - schema_json = proto.Field( - proto.STRING, - number=6, - ) - destination_table = proto.Field( - proto.MESSAGE, - number=3, - message='TableName', - ) - create_disposition = proto.Field( - proto.STRING, - number=4, - ) - write_disposition = proto.Field( - proto.STRING, - number=5, - ) - destination_table_encryption = proto.Field( - proto.MESSAGE, - number=7, - message='EncryptionInfo', - ) - - class Extract(proto.Message): - r"""Describes an extract job, which exports data to an external - source via the export pipeline. - - Attributes: - destination_uris (Sequence[str]): - Google Cloud Storage URIs where extracted - data should be written. - source_table (google.cloud.bigquery_logging_v1.types.TableName): - The source table. - """ - - destination_uris = proto.RepeatedField( - proto.STRING, - number=1, - ) - source_table = proto.Field( - proto.MESSAGE, - number=2, - message='TableName', - ) - - class TableCopy(proto.Message): - r"""Describes a copy job, which copies an existing table to - another table. - - Attributes: - source_tables (Sequence[google.cloud.bigquery_logging_v1.types.TableName]): - Source tables. - destination_table (google.cloud.bigquery_logging_v1.types.TableName): - Destination table. - create_disposition (str): - Describes when a job is allowed to create a table: - ``CREATE_IF_NEEDED``, ``CREATE_NEVER``. - write_disposition (str): - Describes how writes affect existing tables: - ``WRITE_TRUNCATE``, ``WRITE_APPEND``, ``WRITE_EMPTY``. - destination_table_encryption (google.cloud.bigquery_logging_v1.types.EncryptionInfo): - Result table encryption information. Set when - non-default encryption is used. - """ - - source_tables = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='TableName', - ) - destination_table = proto.Field( - proto.MESSAGE, - number=2, - message='TableName', - ) - create_disposition = proto.Field( - proto.STRING, - number=3, - ) - write_disposition = proto.Field( - proto.STRING, - number=4, - ) - destination_table_encryption = proto.Field( - proto.MESSAGE, - number=5, - message='EncryptionInfo', - ) - - query = proto.Field( - proto.MESSAGE, - number=5, - oneof='configuration', - message=Query, - ) - load = proto.Field( - proto.MESSAGE, - number=6, - oneof='configuration', - message=Load, - ) - extract = proto.Field( - proto.MESSAGE, - number=7, - oneof='configuration', - message=Extract, - ) - table_copy = proto.Field( - proto.MESSAGE, - number=8, - oneof='configuration', - message=TableCopy, - ) - dry_run = proto.Field( - proto.BOOL, - number=9, - ) - labels = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - - -class TableDefinition(proto.Message): - r"""Describes an external data source used in a query. - Attributes: - name (str): - Name of the table, used in queries. - source_uris (Sequence[str]): - Google Cloud Storage URIs for the data to be - imported. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - source_uris = proto.RepeatedField( - proto.STRING, - number=2, - ) - - -class JobStatus(proto.Message): - r"""Running state of a job. - Attributes: - state (str): - State of a job: ``PENDING``, ``RUNNING``, or ``DONE``. - error (google.rpc.status_pb2.Status): - If the job did not complete successfully, - this field describes why. - additional_errors (Sequence[google.rpc.status_pb2.Status]): - Errors encountered during the running of the - job. Do not necessarily mean that the job has - completed or was unsuccessful. 
- """ - - state = proto.Field( - proto.STRING, - number=1, - ) - error = proto.Field( - proto.MESSAGE, - number=2, - message=status_pb2.Status, - ) - additional_errors = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=status_pb2.Status, - ) - - -class JobStatistics(proto.Message): - r"""Job statistics that may change after a job starts. - Attributes: - create_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Time when the job ended. - total_processed_bytes (int): - Total bytes processed for a job. - total_billed_bytes (int): - Processed bytes, adjusted by the job's CPU - usage. - billing_tier (int): - The tier assigned by CPU-based billing. - total_slot_ms (int): - The total number of slot-ms consumed by the - query job. - reservation_usage (Sequence[google.cloud.bigquery_logging_v1.types.JobStatistics.ReservationResourceUsage]): - Reservation usage. - referenced_tables (Sequence[google.cloud.bigquery_logging_v1.types.TableName]): - The first N tables accessed by the query job. Older queries - that reference a large number of tables may not have all of - their tables in this list. You can use the - total_tables_processed count to know how many total tables - were read in the query. For new queries, there is currently - no limit. - total_tables_processed (int): - Total number of unique tables referenced in - the query. - referenced_views (Sequence[google.cloud.bigquery_logging_v1.types.TableName]): - The first N views accessed by the query job. Older queries - that reference a large number of views may not have all of - their views in this list. You can use the - total_tables_processed count to know how many total tables - were read in the query. For new queries, there is currently - no limit. - total_views_processed (int): - Total number of unique views referenced in - the query. - query_output_row_count (int): - Number of output rows produced by the query - job. - total_load_output_bytes (int): - Total bytes loaded for an import job. - """ - - class ReservationResourceUsage(proto.Message): - r"""Job resource usage breakdown by reservation. - Attributes: - name (str): - Reservation name or "unreserved" for on- - emand resources usage. - slot_ms (int): - Total slot milliseconds used by the - reservation for a particular job. 
- """ - - name = proto.Field( - proto.STRING, - number=1, - ) - slot_ms = proto.Field( - proto.INT64, - number=2, - ) - - create_time = proto.Field( - proto.MESSAGE, - number=1, - message=timestamp_pb2.Timestamp, - ) - start_time = proto.Field( - proto.MESSAGE, - number=2, - message=timestamp_pb2.Timestamp, - ) - end_time = proto.Field( - proto.MESSAGE, - number=3, - message=timestamp_pb2.Timestamp, - ) - total_processed_bytes = proto.Field( - proto.INT64, - number=4, - ) - total_billed_bytes = proto.Field( - proto.INT64, - number=5, - ) - billing_tier = proto.Field( - proto.INT32, - number=7, - ) - total_slot_ms = proto.Field( - proto.INT64, - number=8, - ) - reservation_usage = proto.RepeatedField( - proto.MESSAGE, - number=14, - message=ReservationResourceUsage, - ) - referenced_tables = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='TableName', - ) - total_tables_processed = proto.Field( - proto.INT32, - number=10, - ) - referenced_views = proto.RepeatedField( - proto.MESSAGE, - number=11, - message='TableName', - ) - total_views_processed = proto.Field( - proto.INT32, - number=12, - ) - query_output_row_count = proto.Field( - proto.INT64, - number=15, - ) - total_load_output_bytes = proto.Field( - proto.INT64, - number=13, - ) - - -class DatasetName(proto.Message): - r"""The fully-qualified name for a dataset. - Attributes: - project_id (str): - The project ID. - dataset_id (str): - The dataset ID within the project. - """ - - project_id = proto.Field( - proto.STRING, - number=1, - ) - dataset_id = proto.Field( - proto.STRING, - number=2, - ) - - -class TableName(proto.Message): - r"""The fully-qualified name for a table. - Attributes: - project_id (str): - The project ID. - dataset_id (str): - The dataset ID within the project. - table_id (str): - The table ID of the table within the dataset. - """ - - project_id = proto.Field( - proto.STRING, - number=1, - ) - dataset_id = proto.Field( - proto.STRING, - number=2, - ) - table_id = proto.Field( - proto.STRING, - number=3, - ) - - -class JobName(proto.Message): - r"""The fully-qualified name for a job. - Attributes: - project_id (str): - The project ID. - job_id (str): - The job ID within the project. - location (str): - The job location. - """ - - project_id = proto.Field( - proto.STRING, - number=1, - ) - job_id = proto.Field( - proto.STRING, - number=2, - ) - location = proto.Field( - proto.STRING, - number=3, - ) - - -class EncryptionInfo(proto.Message): - r"""Describes encryption properties for a table or a job - Attributes: - kms_key_name (str): - unique identifier for cloud kms key - """ - - kms_key_name = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini deleted file mode 100644 index 4505b48..0000000 --- a/owl-bot-staging/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.6 -namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py deleted file mode 100644 index bcac2b8..0000000 --- a/owl-bot-staging/v1/noxfile.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox  # type: ignore - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - - -nox.options.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds", - # exclude update_lower_bounds from default - "docs", -] - -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/bigquery_logging_v1/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python='3.7') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=['3.6', '3.7']) -def mypy(session): - """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python='3.6') -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W",  # warnings as errors - "-T",  # show full traceback on exception - "-N",  # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/owl-bot-staging/v1/scripts/fixup_bigquery_logging_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_bigquery_logging_v1_keywords.py deleted file mode 100644 index 4f0341f..0000000 --- a/owl-bot-staging/v1/scripts/fixup_bigquery_logging_v1_keywords.py +++ /dev/null @@ -1,175 +0,0 @@ -#!
/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class bigquery_loggingCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=bigquery_loggingCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. 
- updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the bigquery_logging client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py deleted file mode 100644 index 64273f7..0000000 --- a/owl-bot-staging/v1/setup.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
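The fix_files() entry point above can also be driven directly, which makes the script's preconditions concrete: the input directory must exist, and the output directory must exist and be empty. A sketch under those assumptions (the directory names and the import path are hypothetical):

    import pathlib

    # Hypothetical import; assumes the script's directory is on sys.path.
    from fixup_bigquery_logging_v1_keywords import fix_files

    fix_files(
        in_dir=pathlib.Path('old_samples'),     # must be an existing directory
        out_dir=pathlib.Path('fixed_samples'),  # must exist and be empty
    )

Note that METHOD_TO_PARAMS is empty for this generated client, so leave_Call() hits a KeyError for every call it visits and returns the call unchanged; the command-line equivalent is the --input-directory/--output-directory invocation parsed above.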
-# -import io -import os -import setuptools # type: ignore - -version = '0.1.0' - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -setuptools.setup( - name='google-cloud-bigquery-logging', - version=version, - long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 2.0.0dev', - 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', 'grpc-google-iam-v1 >= 0.12.3, < 0.13dev', ), - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - zip_safe=False, -) diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py deleted file mode 100644 index b54a5fc..0000000 --- a/owl-bot-staging/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py deleted file mode 100644 index b54a5fc..0000000 --- a/owl-bot-staging/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index b54a5fc..0000000 --- a/owl-bot-staging/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/bigquery_logging_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/bigquery_logging_v1/__init__.py deleted file mode 100644 index b54a5fc..0000000 --- a/owl-bot-staging/v1/tests/unit/gapic/bigquery_logging_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -#
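A final worked example from the removed fixup script, since its helper's return order is easy to misread: partition() yields the elements matching the predicate first, then the rest, each group in stable input order. A self-contained check mirroring the deleted definition:

    from typing import Any, Callable, List, Sequence, Tuple

    def partition(
        predicate: Callable[[Any], bool],
        iterator: Sequence[Any],
    ) -> Tuple[List[Any], List[Any]]:
        """A stable, out-of-place partition, as in the deleted script."""
        results = ([], [])
        for i in iterator:
            # True lands in results[1], False in results[0].
            results[int(predicate(i))].append(i)
        # Returns trueList, falseList
        return results[1], results[0]

    ctrl = ('retry', 'timeout', 'metadata')
    plain, control = partition(lambda kw: kw not in ctrl,
                               ['request', 'retry', 'metadata'])
    assert plain == ['request']
    assert control == ['retry', 'metadata']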