From 8ed9094df80db87caa9852279be76d69783dc9c3 Mon Sep 17 00:00:00 2001 From: Yoshi Automation Bot Date: Thu, 25 Jun 2020 10:58:07 -0700 Subject: [PATCH] feat: add support for secondary aggregation and Monitoring Query Language (#22) --- .coveragerc | 16 + .flake8 | 18 + .github/ISSUE_TEMPLATE/bug_report.md | 3 +- .gitignore | 2 + .kokoro/publish-docs.sh | 2 - .kokoro/release.sh | 2 - .kokoro/samples/lint/common.cfg | 34 + .kokoro/samples/lint/continuous.cfg | 6 + .kokoro/samples/lint/periodic.cfg | 6 + .kokoro/samples/lint/presubmit.cfg | 6 + .kokoro/samples/python3.6/common.cfg | 34 + .kokoro/samples/python3.6/continuous.cfg | 7 + .kokoro/samples/python3.6/periodic.cfg | 6 + .kokoro/samples/python3.6/presubmit.cfg | 6 + .kokoro/samples/python3.7/common.cfg | 34 + .kokoro/samples/python3.7/continuous.cfg | 6 + .kokoro/samples/python3.7/periodic.cfg | 6 + .kokoro/samples/python3.7/presubmit.cfg | 6 + .kokoro/samples/python3.8/common.cfg | 34 + .kokoro/samples/python3.8/continuous.cfg | 6 + .kokoro/samples/python3.8/periodic.cfg | 6 + .kokoro/samples/python3.8/presubmit.cfg | 6 + .kokoro/test-samples.sh | 104 +++ CONTRIBUTING.rst | 15 +- MANIFEST.in | 19 + docs/conf.py | 9 +- docs/gapic/v1/api.rst | 4 +- docs/gapic/v1/types.rst | 4 +- docs/index.rst | 2 + docs/multiprocessing.rst | 7 + google/cloud/monitoring_dashboard/v1.py | 6 +- .../cloud/monitoring_dashboard/v1/__init__.py | 10 +- .../v1/gapic/dashboards_service_client.py | 188 ++--- .../monitoring_dashboard/v1/gapic/enums.py | 650 ++++++------------ .../dashboards_service_grpc_transport.py | 88 +-- .../v1/proto/common.proto | 449 ++++++++++++ .../v1/proto/common_pb2.py | 409 ++++++++--- .../v1/proto/common_pb2_grpc.py | 2 +- .../v1/proto/dashboard.proto | 66 ++ .../v1/proto/dashboard_pb2.py | 72 +- .../v1/proto/dashboard_pb2_grpc.py | 2 +- .../v1/proto/dashboards_service.proto | 179 +++++ .../v1/proto/dashboards_service_pb2.py | 182 ++--- .../v1/proto/dashboards_service_pb2_grpc.py | 42 +- .../v1/proto/drilldowns.proto | 25 + .../v1/proto/drilldowns_pb2.py | 26 +- .../v1/proto/drilldowns_pb2_grpc.py | 2 +- .../v1/proto/layouts.proto | 74 ++ .../v1/proto/layouts_pb2.py | 78 +-- .../v1/proto/layouts_pb2_grpc.py | 2 +- .../v1/proto/metrics.proto | 174 +++++ .../v1/proto/metrics_pb2.py | 284 +++++--- .../v1/proto/metrics_pb2_grpc.py | 2 +- .../v1/proto/scorecard.proto | 111 +++ .../v1/proto/scorecard_pb2.py | 95 +-- .../v1/proto/scorecard_pb2_grpc.py | 2 +- .../v1/proto/service.proto | 23 + .../v1/proto/service_pb2.py | 18 +- .../v1/proto/service_pb2_grpc.py | 2 +- .../monitoring_dashboard/v1/proto/text.proto | 44 ++ .../monitoring_dashboard/v1/proto/text_pb2.py | 44 +- .../v1/proto/text_pb2_grpc.py | 2 +- .../v1/proto/widget.proto | 51 ++ .../v1/proto/widget_pb2.py | 56 +- .../v1/proto/widget_pb2_grpc.py | 2 +- .../v1/proto/xychart.proto | 146 ++++ .../v1/proto/xychart_pb2.py | 184 +++-- .../v1/proto/xychart_pb2_grpc.py | 2 +- google/cloud/monitoring_dashboard/v1/types.py | 5 +- noxfile.py | 32 +- scripts/decrypt-secrets.sh | 33 + scripts/readme-gen/readme_gen.py | 66 ++ scripts/readme-gen/templates/README.tmpl.rst | 87 +++ scripts/readme-gen/templates/auth.tmpl.rst | 9 + .../templates/auth_api_key.tmpl.rst | 14 + .../templates/install_deps.tmpl.rst | 29 + .../templates/install_portaudio.tmpl.rst | 35 + setup.cfg | 16 + synth.metadata | 30 +- synth.py | 5 +- testing/.gitignore | 3 + .../v1/test_dashboards_service_client_v1.py | 12 +- 82 files changed, 3320 insertions(+), 1266 deletions(-) create mode 100644 .kokoro/samples/lint/common.cfg 
create mode 100644 .kokoro/samples/lint/continuous.cfg create mode 100644 .kokoro/samples/lint/periodic.cfg create mode 100644 .kokoro/samples/lint/presubmit.cfg create mode 100644 .kokoro/samples/python3.6/common.cfg create mode 100644 .kokoro/samples/python3.6/continuous.cfg create mode 100644 .kokoro/samples/python3.6/periodic.cfg create mode 100644 .kokoro/samples/python3.6/presubmit.cfg create mode 100644 .kokoro/samples/python3.7/common.cfg create mode 100644 .kokoro/samples/python3.7/continuous.cfg create mode 100644 .kokoro/samples/python3.7/periodic.cfg create mode 100644 .kokoro/samples/python3.7/presubmit.cfg create mode 100644 .kokoro/samples/python3.8/common.cfg create mode 100644 .kokoro/samples/python3.8/continuous.cfg create mode 100644 .kokoro/samples/python3.8/periodic.cfg create mode 100644 .kokoro/samples/python3.8/presubmit.cfg create mode 100755 .kokoro/test-samples.sh create mode 100644 docs/multiprocessing.rst create mode 100644 google/cloud/monitoring_dashboard/v1/proto/common.proto create mode 100644 google/cloud/monitoring_dashboard/v1/proto/dashboard.proto create mode 100644 google/cloud/monitoring_dashboard/v1/proto/dashboards_service.proto create mode 100644 google/cloud/monitoring_dashboard/v1/proto/drilldowns.proto create mode 100644 google/cloud/monitoring_dashboard/v1/proto/layouts.proto create mode 100644 google/cloud/monitoring_dashboard/v1/proto/metrics.proto create mode 100644 google/cloud/monitoring_dashboard/v1/proto/scorecard.proto create mode 100644 google/cloud/monitoring_dashboard/v1/proto/service.proto create mode 100644 google/cloud/monitoring_dashboard/v1/proto/text.proto create mode 100644 google/cloud/monitoring_dashboard/v1/proto/widget.proto create mode 100644 google/cloud/monitoring_dashboard/v1/proto/xychart.proto create mode 100755 scripts/decrypt-secrets.sh create mode 100644 scripts/readme-gen/readme_gen.py create mode 100644 scripts/readme-gen/templates/README.tmpl.rst create mode 100644 scripts/readme-gen/templates/auth.tmpl.rst create mode 100644 scripts/readme-gen/templates/auth_api_key.tmpl.rst create mode 100644 scripts/readme-gen/templates/install_deps.tmpl.rst create mode 100644 scripts/readme-gen/templates/install_portaudio.tmpl.rst create mode 100644 testing/.gitignore diff --git a/.coveragerc b/.coveragerc index b178b09..dd39c85 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! [run] branch = True diff --git a/.flake8 b/.flake8 index 0268ecc..ed93163 100644 --- a/.flake8 +++ b/.flake8 @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! [flake8] ignore = E203, E266, E501, W503 @@ -5,6 +21,8 @@ exclude = # Exclude generated code. **/proto/** **/gapic/** + **/services/** + **/types/** *_pb2.py # Standard linting exemptions. diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index e6fafa2..192b6a5 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -11,8 +11,7 @@ Thanks for stopping by to let us know something could be better! Please run down the following list and make sure you've tried the usual "quick fixes": - Search the issues already opened: https://github.com/googleapis/python-monitoring-dashboards/issues - - Search the issues on our "catch-all" repository: https://github.com/googleapis/google-cloud-python - - Search StackOverflow: http://stackoverflow.com/questions/tagged/google-cloud-platform+python + - Search StackOverflow: https://stackoverflow.com/questions/tagged/google-cloud-platform+python If you are still having issues, please be sure to include as much information as possible: diff --git a/.gitignore b/.gitignore index 3fb06e0..b87e1ed 100644 --- a/.gitignore +++ b/.gitignore @@ -10,6 +10,7 @@ dist build eggs +.eggs parts bin var @@ -49,6 +50,7 @@ bigquery/docs/generated # Virtual environment env/ coverage.xml +sponge_log.xml # System test environment variables. system_tests/local_test_setup diff --git a/.kokoro/publish-docs.sh b/.kokoro/publish-docs.sh index 8182174..2aa25f2 100755 --- a/.kokoro/publish-docs.sh +++ b/.kokoro/publish-docs.sh @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -#!/bin/bash - set -eo pipefail # Disable buffering, so that the logs stream through. diff --git a/.kokoro/release.sh b/.kokoro/release.sh index ad8f6db..585acfd 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -13,8 +13,6 @@ # See the License for the specific language governing permissions and # limitations under the License. -#!/bin/bash - set -eo pipefail # Start the releasetool reporter diff --git a/.kokoro/samples/lint/common.cfg b/.kokoro/samples/lint/common.cfg new file mode 100644 index 0000000..c096483 --- /dev/null +++ b/.kokoro/samples/lint/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "lint" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-monitoring-dashboards/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-monitoring-dashboards/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/.kokoro/samples/lint/continuous.cfg b/.kokoro/samples/lint/continuous.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/lint/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/lint/periodic.cfg b/.kokoro/samples/lint/periodic.cfg new file mode 100644 index 0000000..50fec96 --- /dev/null +++ b/.kokoro/samples/lint/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/.kokoro/samples/lint/presubmit.cfg b/.kokoro/samples/lint/presubmit.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/lint/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg new file mode 100644 index 0000000..334b52f --- /dev/null +++ b/.kokoro/samples/python3.6/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.6" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-monitoring-dashboards/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-monitoring-dashboards/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.6/continuous.cfg b/.kokoro/samples/python3.6/continuous.cfg new file mode 100644 index 0000000..7218af1 --- /dev/null +++ b/.kokoro/samples/python3.6/continuous.cfg @@ -0,0 +1,7 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} + diff --git a/.kokoro/samples/python3.6/periodic.cfg b/.kokoro/samples/python3.6/periodic.cfg new file mode 100644 index 0000000..50fec96 --- /dev/null +++ b/.kokoro/samples/python3.6/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.6/presubmit.cfg b/.kokoro/samples/python3.6/presubmit.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.6/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg new file mode 100644 index 0000000..8506339 --- /dev/null +++ b/.kokoro/samples/python3.7/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.7" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-monitoring-dashboards/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-monitoring-dashboards/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.7/continuous.cfg b/.kokoro/samples/python3.7/continuous.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.7/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.7/periodic.cfg b/.kokoro/samples/python3.7/periodic.cfg new file mode 100644 index 0000000..50fec96 --- /dev/null +++ b/.kokoro/samples/python3.7/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.7/presubmit.cfg b/.kokoro/samples/python3.7/presubmit.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.7/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg new file mode 100644 index 0000000..89f0c6c --- /dev/null +++ b/.kokoro/samples/python3.8/common.cfg @@ -0,0 +1,34 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +# Build logs will be here +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} + +# Specify which tests to run +env_vars: { + key: "RUN_TESTS_SESSION" + value: "py-3.8" +} + +env_vars: { + key: "TRAMPOLINE_BUILD_FILE" + value: "github/python-monitoring-dashboards/.kokoro/test-samples.sh" +} + +# Configure the docker image for kokoro-trampoline. +env_vars: { + key: "TRAMPOLINE_IMAGE" + value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker" +} + +# Download secrets for samples +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples" + +# Download trampoline resources. +gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline" + +# Use the trampoline script to run in docker. 
+build_file: "python-monitoring-dashboards/.kokoro/trampoline.sh" \ No newline at end of file diff --git a/.kokoro/samples/python3.8/continuous.cfg b/.kokoro/samples/python3.8/continuous.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.8/continuous.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.8/periodic.cfg b/.kokoro/samples/python3.8/periodic.cfg new file mode 100644 index 0000000..50fec96 --- /dev/null +++ b/.kokoro/samples/python3.8/periodic.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "False" +} \ No newline at end of file diff --git a/.kokoro/samples/python3.8/presubmit.cfg b/.kokoro/samples/python3.8/presubmit.cfg new file mode 100644 index 0000000..a1c8d97 --- /dev/null +++ b/.kokoro/samples/python3.8/presubmit.cfg @@ -0,0 +1,6 @@ +# Format: //devtools/kokoro/config/proto/build.proto + +env_vars: { + key: "INSTALL_LIBRARY_FROM_SOURCE" + value: "True" +} \ No newline at end of file diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh new file mode 100755 index 0000000..2b9c411 --- /dev/null +++ b/.kokoro/test-samples.sh @@ -0,0 +1,104 @@ +#!/bin/bash +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +# `-e` enables the script to automatically fail when a command fails +# `-o pipefail` sets the exit code to the rightmost comment to exit with a non-zero +set -eo pipefail +# Enables `**` to include files nested inside sub-folders +shopt -s globstar + +cd github/python-monitoring-dashboards + +# Run periodic samples tests at latest release +if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + LATEST_RELEASE=$(git describe --abbrev=0 --tags) + git checkout $LATEST_RELEASE +fi + +# Disable buffering, so that the logs stream through. +export PYTHONUNBUFFERED=1 + +# Debug: show build environment +env | grep KOKORO + +# Install nox +python3.6 -m pip install --upgrade --quiet nox + +# Use secrets acessor service account to get secrets +if [[ -f "${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" ]]; then + gcloud auth activate-service-account \ + --key-file="${KOKORO_GFILE_DIR}/secrets_viewer_service_account.json" \ + --project="cloud-devrel-kokoro-resources" +fi + +# This script will create 3 files: +# - testing/test-env.sh +# - testing/service-account.json +# - testing/client-secrets.json +./scripts/decrypt-secrets.sh + +source ./testing/test-env.sh +export GOOGLE_APPLICATION_CREDENTIALS=$(pwd)/testing/service-account.json + +# For cloud-run session, we activate the service account for gcloud sdk. 
+gcloud auth activate-service-account \ + --key-file "${GOOGLE_APPLICATION_CREDENTIALS}" + +export GOOGLE_CLIENT_SECRETS=$(pwd)/testing/client-secrets.json + +echo -e "\n******************** TESTING PROJECTS ********************" + +# Switch to 'fail at end' to allow all tests to complete before exiting. +set +e +# Use RTN to return a non-zero value if the test fails. +RTN=0 +ROOT=$(pwd) +# Find all requirements.txt in the samples directory (may break on whitespace). +for file in samples/**/requirements.txt; do + cd "$ROOT" + # Navigate to the project folder. + file=$(dirname "$file") + cd "$file" + + echo "------------------------------------------------------------" + echo "- testing $file" + echo "------------------------------------------------------------" + + # Use nox to execute the tests for the project. + python3.6 -m nox -s "$RUN_TESTS_SESSION" + EXIT=$? + + # If this is a periodic build, send the test log to the Build Cop Bot. + # See https://github.com/googleapis/repo-automation-bots/tree/master/packages/buildcop. + if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then + chmod +x $KOKORO_GFILE_DIR/linux_amd64/buildcop + $KOKORO_GFILE_DIR/linux_amd64/buildcop + fi + + if [[ $EXIT -ne 0 ]]; then + RTN=1 + echo -e "\n Testing failed: Nox returned a non-zero exit code. \n" + else + echo -e "\n Testing completed.\n" + fi + +done +cd "$ROOT" + +# Workaround for Kokoro permissions issue: delete secrets +rm testing/{test-env.sh,client-secrets.json,service-account.json} + +exit "$RTN" \ No newline at end of file diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index d91e311..518cb41 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -22,7 +22,7 @@ In order to add a feature: documentation. - The feature must work fully on the following CPython versions: 2.7, - 3.5, 3.6, and 3.7 on both UNIX and Windows. + 3.5, 3.6, 3.7 and 3.8 on both UNIX and Windows. - The feature must not add unnecessary dependencies (where "unnecessary" is of course subjective, but new dependencies should @@ -214,26 +214,18 @@ We support: - `Python 3.5`_ - `Python 3.6`_ - `Python 3.7`_ +- `Python 3.8`_ .. _Python 3.5: https://docs.python.org/3.5/ .. _Python 3.6: https://docs.python.org/3.6/ .. _Python 3.7: https://docs.python.org/3.7/ +.. _Python 3.8: https://docs.python.org/3.8/ Supported versions can be found in our ``noxfile.py`` `config`_. .. _config: https://github.com/googleapis/python-monitoring-dashboards/blob/master/noxfile.py -We explicitly decided not to support `Python 2.5`_ due to `decreased usage`_ -and lack of continuous integration `support`_. - -.. _Python 2.5: https://docs.python.org/2.5/ -.. _decreased usage: https://caremad.io/2013/10/a-look-at-pypi-downloads/ -.. _support: https://blog.travis-ci.com/2013-11-18-upcoming-build-environment-updates/ - -We have `dropped 2.6`_ as a supported version as well since Python 2.6 is no -longer supported by the core development team. - Python 2.7 support is deprecated. All code changes should maintain Python 2.7 compatibility until January 1, 2020. We also explicitly decided to support Python 3 beginning with version @@ -247,7 +239,6 @@ We also explicitly decided to support Python 3 beginning with version .. _prominent: https://docs.djangoproject.com/en/1.9/faq/install/#what-python-version-can-i-use-with-django .. _projects: http://flask.pocoo.org/docs/0.10/python3/ .. _Unicode literal support: https://www.python.org/dev/peps/pep-0414/ -.. 
_dropped 2.6: https://github.com/googleapis/google-cloud-python/issues/995 ********** Versioning diff --git a/MANIFEST.in b/MANIFEST.in index cd011be..e9e29d1 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,6 +1,25 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! include README.rst LICENSE recursive-include google *.json *.proto recursive-include tests * global-exclude *.py[co] global-exclude __pycache__ + +# Exclude scripts for samples readmegen +prune scripts/readme-gen \ No newline at end of file diff --git a/docs/conf.py b/docs/conf.py index bf9e8ea..acc1a42 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -38,21 +38,18 @@ "sphinx.ext.napoleon", "sphinx.ext.todo", "sphinx.ext.viewcode", + "recommonmark", ] # autodoc/autosummary flags autoclass_content = "both" -autodoc_default_flags = ["members"] +autodoc_default_options = {"members": True} autosummary_generate = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] @@ -340,7 +337,7 @@ intersphinx_mapping = { "python": ("http://python.readthedocs.org/en/latest/", None), "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), "grpc": ("https://grpc.io/grpc/python/", None), } diff --git a/docs/gapic/v1/api.rst b/docs/gapic/v1/api.rst index 1489bc2..180c1c8 100644 --- a/docs/gapic/v1/api.rst +++ b/docs/gapic/v1/api.rst @@ -1,5 +1,5 @@ -Client for Stackdriver Monitoring Dashboards API -================================================ +Client for Cloud Monitoring API +=============================== .. automodule:: google.cloud.monitoring_dashboard.v1 :members: diff --git a/docs/gapic/v1/types.rst b/docs/gapic/v1/types.rst index 34bd82c..97bef83 100644 --- a/docs/gapic/v1/types.rst +++ b/docs/gapic/v1/types.rst @@ -1,5 +1,5 @@ -Types for Stackdriver Monitoring Dashboards API Client -====================================================== +Types for Cloud Monitoring API Client +===================================== .. automodule:: google.cloud.monitoring_dashboard.v1.types :members: \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst index 848fb50..18af716 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,5 +1,7 @@ .. include:: README.rst +.. include:: multiprocessing.rst + Api Reference ------------- .. 
toctree:: diff --git a/docs/multiprocessing.rst b/docs/multiprocessing.rst new file mode 100644 index 0000000..1cb29d4 --- /dev/null +++ b/docs/multiprocessing.rst @@ -0,0 +1,7 @@ +.. note:: + + Because this client uses :mod:`grpcio` library, it is safe to + share instances across threads. In multiprocessing scenarios, the best + practice is to create client instances *after* the invocation of + :func:`os.fork` by :class:`multiprocessing.Pool` or + :class:`multiprocessing.Process`. diff --git a/google/cloud/monitoring_dashboard/v1.py b/google/cloud/monitoring_dashboard/v1.py index 2094742..75e49ff 100644 --- a/google/cloud/monitoring_dashboard/v1.py +++ b/google/cloud/monitoring_dashboard/v1.py @@ -22,4 +22,8 @@ from google.cloud.monitoring_dashboard.v1 import types -__all__ = ("enums", "types", "DashboardsServiceClient") +__all__ = ( + "enums", + "types", + "DashboardsServiceClient", +) diff --git a/google/cloud/monitoring_dashboard/v1/__init__.py b/google/cloud/monitoring_dashboard/v1/__init__.py index f70bddf..ad3d901 100644 --- a/google/cloud/monitoring_dashboard/v1/__init__.py +++ b/google/cloud/monitoring_dashboard/v1/__init__.py @@ -26,8 +26,8 @@ if sys.version_info[:2] == (2, 7): message = ( - "A future version of this library will drop support for Python 2.7." - "More details about Python 2 support for Google Cloud Client Libraries" + "A future version of this library will drop support for Python 2.7. " + "More details about Python 2 support for Google Cloud Client Libraries " "can be found at https://cloud.google.com/python/docs/python2-sunset/" ) warnings.warn(message, DeprecationWarning) @@ -38,4 +38,8 @@ class DashboardsServiceClient(dashboards_service_client.DashboardsServiceClient) enums = enums -__all__ = ("enums", "types", "DashboardsServiceClient") +__all__ = ( + "enums", + "types", + "DashboardsServiceClient", +) diff --git a/google/cloud/monitoring_dashboard/v1/gapic/dashboards_service_client.py b/google/cloud/monitoring_dashboard/v1/gapic/dashboards_service_client.py index 3be2010..38e05d3 100644 --- a/google/cloud/monitoring_dashboard/v1/gapic/dashboards_service_client.py +++ b/google/cloud/monitoring_dashboard/v1/gapic/dashboards_service_client.py @@ -28,6 +28,7 @@ import google.api_core.gapic_v1.routing_header import google.api_core.grpc_helpers import google.api_core.page_iterator +import google.api_core.path_template import grpc from google.cloud.monitoring_dashboard.v1.gapic import dashboards_service_client_config @@ -42,7 +43,7 @@ _GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-monitoring-dashboards" + "google-cloud-monitoring-dashboards", ).version @@ -79,6 +80,22 @@ def from_service_account_file(cls, filename, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def dashboard_path(cls, project, dashboard): + """Return a fully-qualified dashboard string.""" + return google.api_core.path_template.expand( + "projects/{project}/dashboards/{dashboard}", + project=project, + dashboard=dashboard, + ) + + @classmethod + def project_path(cls, project): + """Return a fully-qualified project string.""" + return google.api_core.path_template.expand( + "projects/{project}", project=project, + ) + def __init__( self, transport=None, @@ -166,12 +183,12 @@ def __init__( self.transport = transport else: self.transport = dashboards_service_grpc_transport.DashboardsServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials + address=api_endpoint, channel=channel, credentials=credentials, ) 
if client_info is None: client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION + gapic_version=_GAPIC_LIBRARY_VERSION, ) else: client_info.gapic_version = _GAPIC_LIBRARY_VERSION @@ -182,7 +199,7 @@ def __init__( # (Ordinarily, these are the defaults specified in the `*_config.py` # file next to this one.) self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME] + client_config["interfaces"][self._INTERFACE_NAME], ) # Save a dictionary of cached API call functions. @@ -201,22 +218,11 @@ def create_dashboard( metadata=None, ): """ - Identifies which part of the FileDescriptorProto was defined at this - location. - - Each element is a field number or an index. They form a path from the - root FileDescriptorProto to the place where the definition. For example, - this path: [ 4, 3, 2, 7, 1 ] refers to: file.message_type(3) // 4, 3 - .field(7) // 2, 7 .name() // 1 This is because - FileDescriptorProto.message_type has field number 4: repeated - DescriptorProto message_type = 4; and DescriptorProto.field has field - number 2: repeated FieldDescriptorProto field = 2; and - FieldDescriptorProto.name has field number 1: optional string name = 1; - - Thus, the above path gives the location of a field name. If we removed - the last element: [ 4, 3, 2, 7 ] this path refers to the whole field - declaration (from the beginning of the label to the terminating - semicolon). + Creates a new custom dashboard. + + This method requires the ``monitoring.dashboards.create`` permission on + the specified project. For more information, see `Google Cloud + IAM `__. Example: >>> from google.cloud.monitoring_dashboard import v1 @@ -232,8 +238,14 @@ def create_dashboard( >>> response = client.create_dashboard(parent, dashboard) Args: - parent (str): Input and output type names. These are resolved in the same way as - FieldDescriptorProto.type_name, but must refer to a message type. + parent (str): Required. The project on which to execute the request. The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + The ``[PROJECT_ID_OR_NUMBER]`` must match the dashboard resource name. dashboard (Union[dict, ~google.cloud.monitoring_dashboard.v1.types.Dashboard]): Required. The initial dashboard specification. If a dict is provided, it must be of the same form as the protobuf @@ -269,7 +281,7 @@ def create_dashboard( ) request = dashboards_service_pb2.CreateDashboardRequest( - parent=parent, dashboard=dashboard + parent=parent, dashboard=dashboard, ) if metadata is None: metadata = [] @@ -297,16 +309,18 @@ def list_dashboards( metadata=None, ): """ - An annotation that describes a resource definition without a - corresponding message; see ``ResourceDescriptor``. + Lists the existing dashboards. + + This method requires the ``monitoring.dashboards.list`` permission on + the specified project. For more information, see `Google Cloud + IAM `__. Example: >>> from google.cloud.monitoring_dashboard import v1 >>> >>> client = v1.DashboardsServiceClient() >>> - >>> # TODO: Initialize `parent`: - >>> parent = '' + >>> parent = client.project_path('[PROJECT]') >>> >>> # Iterate over all results >>> for element in client.list_dashboards(parent): @@ -323,10 +337,11 @@ def list_dashboards( ... pass Args: - parent (str): Reduce by computing the count of True-valued data points across time - series for each alignment period. This reducer is valid for delta and - gauge metrics of Boolean value type. 
The value type of the output is - ``INT64``. + parent (str): Required. The scope of the dashboards to list. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] page_size (int): The maximum number of resources contained in the underlying API response. If page streaming is performed per- resource, this parameter does not affect the return value. If page @@ -366,7 +381,7 @@ def list_dashboards( ) request = dashboards_service_pb2.ListDashboardsRequest( - parent=parent, page_size=page_size + parent=parent, page_size=page_size, ) if metadata is None: metadata = [] @@ -404,59 +419,27 @@ def get_dashboard( metadata=None, ): """ - Align and convert to a percentage change. This alignment is valid - for gauge and delta metrics with numeric values. This alignment - conceptually computes the equivalent of "((current - - previous)/previous)*100" where previous value is determined based on the - alignmentPeriod. In the event that previous is 0 the calculated value is - infinity with the exception that if both (current - previous) and - previous are 0 the calculated value is 0. A 10 minute moving mean is - computed at each point of the time window prior to the above calculation - to smooth the metric and prevent false positives from very short lived - spikes. Only applicable for data that is >= 0. Any values < 0 are - treated as no data. While delta metrics are accepted by this alignment - special care should be taken that the values for the metric will always - be positive. The output is a gauge metric with value type ``DOUBLE``. + Fetches a specific dashboard. + + This method requires the ``monitoring.dashboards.get`` permission on the + specified dashboard. For more information, see `Google Cloud + IAM `__. Example: >>> from google.cloud.monitoring_dashboard import v1 >>> >>> client = v1.DashboardsServiceClient() >>> - >>> # TODO: Initialize `name`: - >>> name = '' + >>> name = client.dashboard_path('[PROJECT]', '[DASHBOARD]') >>> >>> response = client.get_dashboard(name) Args: - name (str): Should this field be parsed lazily? Lazy applies only to - message-type fields. It means that when the outer message is initially - parsed, the inner message's contents will not be parsed but instead - stored in encoded form. The inner message will actually be parsed when - it is first accessed. - - This is only a hint. Implementations are free to choose whether to use - eager or lazy parsing regardless of the value of this option. However, - setting this option true suggests that the protocol author believes that - using lazy parsing on this field is worth the additional bookkeeping - overhead typically needed to implement it. - - This option does not affect the public interface of any generated code; - all method signatures remain the same. Furthermore, thread-safety of the - interface is not affected by this option; const methods remain safe to - call from multiple threads concurrently, while non-const methods - continue to require exclusive access. - - Note that implementations may choose not to check required fields within - a lazy sub-message. That is, calling IsInitialized() on the outer - message may return true even if the inner message has missing required - fields. This is necessary because otherwise the inner message would have - to be parsed in order to perform the check, defeating the purpose of - lazy parsing. An implementation which chooses not to check required - fields must be consistent about it. 
That is, for any particular - sub-message, the implementation must either *always* check its required - fields, or *never* check its required fields, regardless of whether or - not the message has been parsed. + name (str): Required. The resource name of the Dashboard. The format is one of: + + - ``dashboards/[DASHBOARD_ID]`` (for system dashboards) + - ``projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID]`` (for + custom dashboards). retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. @@ -487,7 +470,7 @@ def get_dashboard( client_info=self._client_info, ) - request = dashboards_service_pb2.GetDashboardRequest(name=name) + request = dashboards_service_pb2.GetDashboardRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -513,50 +496,27 @@ def delete_dashboard( metadata=None, ): """ - Protocol Buffers - Google's data interchange format Copyright 2008 - Google Inc. All rights reserved. - https://developers.google.com/protocol-buffers/ - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - :: - - * Redistributions of source code must retain the above copyright - - notice, this list of conditions and the following disclaimer. \* - Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. \* - Neither the name of Google Inc. nor the names of its contributors may be - used to endorse or promote products derived from this software without - specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS - IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED - TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A - PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER - OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, - EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR - PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF - LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + Deletes an existing custom dashboard. + + This method requires the ``monitoring.dashboards.delete`` permission on + the specified dashboard. For more information, see `Google Cloud + IAM `__. Example: >>> from google.cloud.monitoring_dashboard import v1 >>> >>> client = v1.DashboardsServiceClient() >>> - >>> # TODO: Initialize `name`: - >>> name = '' + >>> name = client.dashboard_path('[PROJECT]', '[DASHBOARD]') >>> >>> client.delete_dashboard(name) Args: - name (str): The ``CreateDashboard`` request. + name (str): Required. The resource name of the Dashboard. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID] retry (Optional[google.api_core.retry.Retry]): A retry object used to retry requests. If ``None`` is specified, requests will be retried using a default configuration. 
@@ -584,7 +544,7 @@ def delete_dashboard( client_info=self._client_info, ) - request = dashboards_service_pb2.DeleteDashboardRequest(name=name) + request = dashboards_service_pb2.DeleteDashboardRequest(name=name,) if metadata is None: metadata = [] metadata = list(metadata) @@ -610,9 +570,9 @@ def update_dashboard( metadata=None, ): """ - Deletes an existing custom dashboard. + Replaces an existing custom dashboard with a new definition. - This method requires the ``monitoring.dashboards.delete`` permission on + This method requires the ``monitoring.dashboards.update`` permission on the specified dashboard. For more information, see `Google Cloud IAM `__. @@ -661,7 +621,7 @@ def update_dashboard( client_info=self._client_info, ) - request = dashboards_service_pb2.UpdateDashboardRequest(dashboard=dashboard) + request = dashboards_service_pb2.UpdateDashboardRequest(dashboard=dashboard,) if metadata is None: metadata = [] metadata = list(metadata) diff --git a/google/cloud/monitoring_dashboard/v1/gapic/enums.py b/google/cloud/monitoring_dashboard/v1/gapic/enums.py index 253bf58..3824010 100644 --- a/google/cloud/monitoring_dashboard/v1/gapic/enums.py +++ b/google/cloud/monitoring_dashboard/v1/gapic/enums.py @@ -21,9 +21,8 @@ class SparkChartType(enum.IntEnum): """ - ``rankingMethod`` is applied to each time series independently to - produce the value which will be used to compare the time series to other - time series. + Defines the possible types of spark chart supported by the + ``Scorecard``. Attributes: SPARK_CHART_TYPE_UNSPECIFIED (int): Not allowed in well-formed requests. @@ -39,145 +38,126 @@ class SparkChartType(enum.IntEnum): class Aggregation(object): class Aligner(enum.IntEnum): """ - The Aligner describes how to bring the data points in a single - time series into temporal alignment. + The ``Aligner`` specifies the operation that will be applied to the + data points in each alignment period in a time series. Except for + ``ALIGN_NONE``, which specifies that no operation be applied, each + alignment operation replaces the set of data values in each alignment + period with a single value: the result of applying the operation to the + data values. An aligned time series has a single data value at the end + of each ``alignment_period``. + + An alignment operation can change the data type of the values, too. For + example, if you apply a counting operation to boolean values, the data + ``value_type`` in the original time series is ``BOOLEAN``, but the + ``value_type`` in the aligned result is ``INT64``. Attributes: - ALIGN_NONE (int): No alignment. Raw data is returned. Not valid if cross-time - series reduction is requested. The value type of the result is - the same as the value type of the input. - ALIGN_DELTA (int): Each of the definitions above may have "options" attached. These are - just annotations which may cause code to be generated slightly - differently or may contain hints for code that manipulates protocol - messages. - - Clients may define custom options as extensions of the \*Options - messages. These extensions may not yet be known at parsing time, so the - parser cannot store the values in them. Instead it stores them in a - field in the \*Options message called uninterpreted_option. This field - must have the same name across all \*Options messages. We then use this - field to populate the extensions when we build a descriptor, at which - point all protos have been parsed and so all extensions are known. 
- - Extension numbers for custom options may be chosen as follows: - - - For options which will only be used within a single application or - organization, or for experimental options, use field numbers 50000 - through 99999. It is up to you to ensure that you do not use the same - number for multiple options. - - For options which will be published and used publicly by multiple - independent entities, e-mail - protobuf-global-extension-registry@google.com to reserve extension - numbers. Simply provide your project name (e.g. Objective-C plugin) - and your project website (if available) -- there's no need to explain - how you intend to use them. Usually you only need one extension - number. You can declare multiple options with only one extension - number by putting them in a sub-message. See the Custom Options - section of the docs for examples: - https://developers.google.com/protocol-buffers/docs/proto#options If - this turns out to be popular, a web service will be set up to - automatically assign option numbers. - ALIGN_RATE (int): A designation of a specific field behavior (required, output only, - etc.) in protobuf messages. - - Examples: - - string name = 1 [(google.api.field_behavior) = REQUIRED]; State state = - 1 [(google.api.field_behavior) = OUTPUT_ONLY]; google.protobuf.Duration - ttl = 1 [(google.api.field_behavior) = INPUT_ONLY]; - google.protobuf.Timestamp expire_time = 1 [(google.api.field_behavior) = - OUTPUT_ONLY, (google.api.field_behavior) = IMMUTABLE]; - ALIGN_INTERPOLATE (int): Align by interpolating between adjacent points around the - period boundary. This alignment is valid for gauge - metrics with numeric values. The value type of the result is the same - as the value type of the input. - ALIGN_NEXT_OLDER (int): Align by shifting the oldest data point before the period - boundary to the boundary. This alignment is valid for gauge - metrics. The value type of the result is the same as the - value type of the input. - ALIGN_MIN (int): Align time series via aggregation. The resulting data point in - the alignment period is the minimum of all data points in the - period. This alignment is valid for gauge and delta metrics with numeric - values. The value type of the result is the same as the value - type of the input. - ALIGN_MAX (int): Align time series via aggregation. The resulting data point in - the alignment period is the maximum of all data points in the - period. This alignment is valid for gauge and delta metrics with numeric - values. The value type of the result is the same as the value - type of the input. - ALIGN_MEAN (int): Optional. The historical or future-looking state of the resource - pattern. - - Example: - - :: - - // The InspectTemplate message originally only supported resource - // names with organization, and project was added later. - message InspectTemplate { - option (google.api.resource) = { - type: "dlp.googleapis.com/InspectTemplate" - pattern: - "organizations/{organization}/inspectTemplates/{inspect_template}" - pattern: "projects/{project}/inspectTemplates/{inspect_template}" - history: ORIGINALLY_SINGLE_PATTERN - }; - } - ALIGN_COUNT (int): If there are more results than have been returned, then this field - is set to a non-empty value. To see the additional results, use that - value as ``pageToken`` in the next call to this method. - ALIGN_SUM (int): Align time series via aggregation. The resulting data point in - the alignment period is the sum of all data points in the - period. 
This alignment is valid for gauge and delta metrics with numeric - and distribution values. The value type of the output is the - same as the value type of the input. - ALIGN_STDDEV (int): Reduce by computing 95th percentile of data points across time - series for each alignment period. This reducer is valid for gauge and - delta metrics of numeric and distribution type. The value of the output - is ``DOUBLE`` - ALIGN_COUNT_TRUE (int): ``etag`` is used for optimistic concurrency control as a way to help - prevent simultaneous updates of a policy from overwriting each other. An - ``etag`` is returned in the response to ``GetDashboard``, and users are - expected to put that etag in the request to ``UpdateDashboard`` to - ensure that their change will be applied to the same version of the - Dashboard configuration. The field should not be passed during dashboard - creation. - ALIGN_COUNT_FALSE (int): Reduce by computing 50th percentile of data points across time - series for each alignment period. This reducer is valid for gauge and - delta metrics of numeric and distribution type. The value of the output - is ``DOUBLE`` - ALIGN_FRACTION_TRUE (int): A generic empty message that you can re-use to avoid defining - duplicated empty messages in your APIs. A typical example is to use it - as the request or the response type of an API method. For instance: - - :: - - service Foo { - rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty); - } - - The JSON representation for ``Empty`` is empty JSON object ``{}``. - ALIGN_PERCENTILE_99 (int): If set, all the classes from the .proto file are wrapped in a single - outer class with the given name. This applies to both Proto1 (equivalent - to the old "--one_java_file" option) and Proto2 (where a .proto always - translates to a single class, but you may want to explicitly choose the - class name). - ALIGN_PERCENTILE_95 (int): Reduce by computing the fraction of True-valued data points across - time series for each alignment period. This reducer is valid for delta - and gauge metrics of Boolean value type. The output value is in the - range [0, 1] and has value type ``DOUBLE``. - ALIGN_PERCENTILE_50 (int): Reduce by computing 5th percentile of data points across time series - for each alignment period. This reducer is valid for gauge and delta - metrics of numeric and distribution type. The value of the output is - ``DOUBLE`` - ALIGN_PERCENTILE_05 (int): An indicator of the behavior of a given field (for example, that a - field is required in requests, or given as output but ignored as input). - This **does not** change the behavior in protocol buffers itself; it - only denotes the behavior and may affect how API tooling handles the - field. - - Note: This enum **may** receive new values in the future. - ALIGN_PERCENT_CHANGE (int): The ``ListDashboards`` request. + ALIGN_NONE (int): No alignment. Raw data is returned. Not valid if cross-series + reduction is requested. The ``value_type`` of the result is the same as + the ``value_type`` of the input. + ALIGN_DELTA (int): Align and convert to ``DELTA``. The output is ``delta = y1 - y0``. + + This alignment is valid for ``CUMULATIVE`` and ``DELTA`` metrics. If the + selected alignment period results in periods with no data, then the + aligned value for such a period is created by interpolation. The + ``value_type`` of the aligned result is the same as the ``value_type`` + of the input. + ALIGN_RATE (int): Align and convert to a rate. 
The result is computed as + ``rate = (y1 - y0)/(t1 - t0)``, or "delta over time". Think of this + aligner as providing the slope of the line that passes through the value + at the start and at the end of the ``alignment_period``. + + This aligner is valid for ``CUMULATIVE`` and ``DELTA`` metrics with + numeric values. If the selected alignment period results in periods with + no data, then the aligned value for such a period is created by + interpolation. The output is a ``GAUGE`` metric with ``value_type`` + ``DOUBLE``. + + If, by "rate", you mean "percentage change", see the + ``ALIGN_PERCENT_CHANGE`` aligner instead. + ALIGN_INTERPOLATE (int): Align by interpolating between adjacent points around the alignment + period boundary. This aligner is valid for ``GAUGE`` metrics with + numeric values. The ``value_type`` of the aligned result is the same as + the ``value_type`` of the input. + ALIGN_NEXT_OLDER (int): Align by moving the most recent data point before the end of the + alignment period to the boundary at the end of the alignment period. + This aligner is valid for ``GAUGE`` metrics. The ``value_type`` of the + aligned result is the same as the ``value_type`` of the input. + ALIGN_MIN (int): Align the time series by returning the minimum value in each + alignment period. This aligner is valid for ``GAUGE`` and ``DELTA`` + metrics with numeric values. The ``value_type`` of the aligned result is + the same as the ``value_type`` of the input. + ALIGN_MAX (int): Align the time series by returning the maximum value in each + alignment period. This aligner is valid for ``GAUGE`` and ``DELTA`` + metrics with numeric values. The ``value_type`` of the aligned result is + the same as the ``value_type`` of the input. + ALIGN_MEAN (int): Align the time series by returning the mean value in each alignment + period. This aligner is valid for ``GAUGE`` and ``DELTA`` metrics with + numeric values. The ``value_type`` of the aligned result is ``DOUBLE``. + ALIGN_COUNT (int): Align the time series by returning the number of values in each + alignment period. This aligner is valid for ``GAUGE`` and ``DELTA`` + metrics with numeric or Boolean values. The ``value_type`` of the + aligned result is ``INT64``. + ALIGN_SUM (int): Align the time series by returning the sum of the values in each + alignment period. This aligner is valid for ``GAUGE`` and ``DELTA`` + metrics with numeric and distribution values. The ``value_type`` of the + aligned result is the same as the ``value_type`` of the input. + ALIGN_STDDEV (int): Align the time series by returning the standard deviation of the + values in each alignment period. This aligner is valid for ``GAUGE`` and + ``DELTA`` metrics with numeric values. The ``value_type`` of the output + is ``DOUBLE``. + ALIGN_COUNT_TRUE (int): Align the time series by returning the number of ``True`` values in + each alignment period. This aligner is valid for ``GAUGE`` metrics with + Boolean values. The ``value_type`` of the output is ``INT64``. + ALIGN_COUNT_FALSE (int): Align the time series by returning the number of ``False`` values in + each alignment period. This aligner is valid for ``GAUGE`` metrics with + Boolean values. The ``value_type`` of the output is ``INT64``. + ALIGN_FRACTION_TRUE (int): Align the time series by returning the ratio of the number of + ``True`` values to the total number of values in each alignment period. + This aligner is valid for ``GAUGE`` metrics with Boolean values. 
The + output value is in the range [0.0, 1.0] and has ``value_type`` + ``DOUBLE``. + ALIGN_PERCENTILE_99 (int): Align the time series by using `percentile + aggregation `__. The resulting + data point in each alignment period is the 99th percentile of all data + points in the period. This aligner is valid for ``GAUGE`` and ``DELTA`` + metrics with distribution values. The output is a ``GAUGE`` metric with + ``value_type`` ``DOUBLE``. + ALIGN_PERCENTILE_95 (int): Align the time series by using `percentile + aggregation `__. The resulting + data point in each alignment period is the 95th percentile of all data + points in the period. This aligner is valid for ``GAUGE`` and ``DELTA`` + metrics with distribution values. The output is a ``GAUGE`` metric with + ``value_type`` ``DOUBLE``. + ALIGN_PERCENTILE_50 (int): Align the time series by using `percentile + aggregation `__. The resulting + data point in each alignment period is the 50th percentile of all data + points in the period. This aligner is valid for ``GAUGE`` and ``DELTA`` + metrics with distribution values. The output is a ``GAUGE`` metric with + ``value_type`` ``DOUBLE``. + ALIGN_PERCENTILE_05 (int): Align the time series by using `percentile + aggregation `__. The resulting + data point in each alignment period is the 5th percentile of all data + points in the period. This aligner is valid for ``GAUGE`` and ``DELTA`` + metrics with distribution values. The output is a ``GAUGE`` metric with + ``value_type`` ``DOUBLE``. + ALIGN_PERCENT_CHANGE (int): Align and convert to a percentage change. This aligner is valid for + ``GAUGE`` and ``DELTA`` metrics with numeric values. This alignment + returns ``((current - previous)/previous) * 100``, where the value of + ``previous`` is determined based on the ``alignment_period``. + + If the values of ``current`` and ``previous`` are both 0, then the + returned value is 0. If only ``previous`` is 0, the returned value is + infinity. + + A 10-minute moving mean is computed at each point of the alignment + period prior to the above calculation to smooth the metric and prevent + false positives from very short-lived spikes. The moving mean is only + applicable for data whose values are ``>= 0``. Any values ``< 0`` are + treated as a missing datapoint, and are ignored. While ``DELTA`` metrics + are accepted by this alignment, special care should be taken that the + values for the metric will always be positive. The output is a ``GAUGE`` + metric with ``value_type`` ``DOUBLE``. """ ALIGN_NONE = 0 @@ -202,301 +182,71 @@ class name). class Reducer(enum.IntEnum): """ - A Reducer describes how to aggregate data points from multiple - time series into a single time series. + A Reducer operation describes how to aggregate data points from multiple + time series into a single time series, where the value of each data point + in the resulting series is a function of all the already aligned values in + the input time series. Attributes: - REDUCE_NONE (int): No cross-time series reduction. The output of the aligner is + REDUCE_NONE (int): No cross-time series reduction. The output of the ``Aligner`` is returned. - REDUCE_MEAN (int): The alignment period for per-\ ``time series`` alignment. If - present, ``alignmentPeriod`` must be at least 60 seconds. After per-time - series alignment, each time series will contain data points only on the - period boundaries. If ``perSeriesAligner`` is not specified or equals - ``ALIGN_NONE``, then this field is ignored. 
If ``perSeriesAligner`` is - specified and does not equal ``ALIGN_NONE``, then this field must be - defined; otherwise an error is returned. - REDUCE_MIN (int): Reduce by computing the minimum across time series for each - alignment period. This reducer is valid for delta and - gauge metrics with numeric values. The value type of the output - is the same as the value type of the input. - REDUCE_MAX (int): Reduce by computing the maximum across time series for each - alignment period. This reducer is valid for delta and - gauge metrics with numeric values. The value type of the output - is the same as the value type of the input. - REDUCE_SUM (int): Reduce by computing the sum across time series for each - alignment period. This reducer is valid for delta and - gauge metrics with numeric and distribution values. The value type of - the output is the same as the value type of the input. - REDUCE_STDDEV (int): Signed seconds of the span of time. Must be from -315,576,000,000 to - +315,576,000,000 inclusive. Note: these bounds are computed from: 60 - sec/min \* 60 min/hr \* 24 hr/day \* 365.25 days/year \* 10000 years - REDUCE_COUNT (int): Denotes a field as required. This indicates that the field **must** - be provided as part of the request, and failure to do so will cause an - error (usually ``INVALID_ARGUMENT``). - REDUCE_COUNT_TRUE (int): ``FieldMask`` represents a set of symbolic field paths, for example: - - :: - - paths: "f.a" - paths: "f.b.d" - - Here ``f`` represents a field in some root message, ``a`` and ``b`` - fields in the message found in ``f``, and ``d`` a field found in the - message in ``f.b``. - - Field masks are used to specify a subset of fields that should be - returned by a get operation or modified by an update operation. Field - masks also have a custom JSON encoding (see below). - - # Field Masks in Projections - - When used in the context of a projection, a response message or - sub-message is filtered by the API to only contain those fields as - specified in the mask. For example, if the mask in the previous example - is applied to a response message as follows: - - :: - - f { - a : 22 - b { - d : 1 - x : 2 - } - y : 13 - } - z: 8 - - The result will not contain specific values for fields x,y and z (their - value will be set to the default, and omitted in proto text output): - - :: - - f { - a : 22 - b { - d : 1 - } - } - - A repeated field is not allowed except at the last position of a paths - string. - - If a FieldMask object is not present in a get operation, the operation - applies to all fields (as if a FieldMask of all fields had been - specified). - - Note that a field mask does not necessarily apply to the top-level - response message. In case of a REST get operation, the field mask - applies directly to the response, but in case of a REST list operation, - the mask instead applies to each individual message in the returned - resource list. In case of a REST custom method, other definitions may be - used. Where the mask applies will be clearly documented together with - its declaration in the API. In any case, the effect on the returned - resource/resources is required behavior for APIs. - - # Field Masks in Update Operations - - A field mask in update operations specifies which fields of the targeted - resource are going to be updated. The API is required to only change the - values of the fields as specified in the mask and leave the others - untouched. 
If a resource is passed in to describe the updated values, - the API ignores the values of all fields not covered by the mask. - - If a repeated field is specified for an update operation, new values - will be appended to the existing repeated field in the target resource. - Note that a repeated field is only allowed in the last position of a - ``paths`` string. - - If a sub-message is specified in the last position of the field mask for - an update operation, then new value will be merged into the existing - sub-message in the target resource. - - For example, given the target message: - - :: - - f { - b { - d: 1 - x: 2 - } - c: [1] - } - - And an update message: - - :: - - f { - b { - d: 10 - } - c: [2] - } - - then if the field mask is: - - paths: ["f.b", "f.c"] - - then the result will be: - - :: - - f { - b { - d: 10 - x: 2 - } - c: [1, 2] - } - - An implementation may provide options to override this default behavior - for repeated and message fields. - - In order to reset a field's value to the default, the field must be in - the mask and set to the default value in the provided resource. Hence, - in order to reset all fields of a resource, provide a default instance - of the resource and set all fields in the mask, or do not provide a mask - as described below. - - If a field mask is not present on update, the operation applies to all - fields (as if a field mask of all fields has been specified). Note that - in the presence of schema evolution, this may mean that fields the - client does not know and has therefore not filled into the request will - be reset to their default. If this is unwanted behavior, a specific - service may require a client to always specify a field mask, producing - an error if not. - - As with get operations, the location of the resource which describes the - updated values in the request message depends on the operation kind. In - any case, the effect of the field mask is required to be honored by the - API. - - ## Considerations for HTTP REST - - The HTTP kind of an update operation which uses a field mask must be set - to PATCH instead of PUT in order to satisfy HTTP semantics (PUT must - only be used for full updates). - - # JSON Encoding of Field Masks - - In JSON, a field mask is encoded as a single string where paths are - separated by a comma. Fields name in each path are converted to/from - lower-camel naming conventions. - - As an example, consider the following message declarations: - - :: - - message Profile { - User user = 1; - Photo photo = 2; - } - message User { - string display_name = 1; - string address = 2; - } - - In proto a field mask for ``Profile`` may look as such: - - :: - - mask { - paths: "user.display_name" - paths: "photo" - } - - In JSON, the same mask is represented as below: - - :: - - { - mask: "user.displayName,photo" - } - - # Field Masks and Oneof Fields - - Field masks treat fields in oneofs just as regular fields. Consider the - following message: - - :: - - message SampleMessage { - oneof test_oneof { - string name = 4; - SubMessage sub_message = 9; - } - } - - The field mask can be: - - :: - - mask { - paths: "name" - } - - Or: - - :: - - mask { - paths: "sub_message" - } - - Note that oneof type names ("test_oneof" in this case) cannot be used in - paths. - - ## Field Mask Verification - - The implementation of any API method which has a FieldMask type field in - the request should verify the included field paths, and return an - ``INVALID_ARGUMENT`` error if any path is duplicated or unmappable. 
- REDUCE_COUNT_FALSE (int): The approach to be used to align individual time series. Not all - alignment functions may be applied to all time series, depending on the - metric type and value type of the original time series. Alignment may - change the metric type or the value type of the time series. - - Time series data must be aligned in order to perform cross-time series - reduction. If ``crossSeriesReducer`` is specified, then - ``perSeriesAligner`` must be specified and not equal ``ALIGN_NONE`` and - ``alignmentPeriod`` must be specified; otherwise, an error is returned. - REDUCE_FRACTION_TRUE (int): Plot type is unspecified. The view will default to ``LINE``. - REDUCE_PERCENTILE_99 (int): The approach to be used to combine time series. Not all reducer - functions may be applied to all time series, depending on the metric - type and the value type of the original time series. Reduction may - change the metric type of value type of the time series. - - Time series data must be aligned in order to perform cross-time series - reduction. If ``crossSeriesReducer`` is specified, then - ``perSeriesAligner`` must be specified and not equal ``ALIGN_NONE`` and - ``alignmentPeriod`` must be specified; otherwise, an error is returned. - REDUCE_PERCENTILE_95 (int): Denotes a field as output only. This indicates that the field is - provided in responses, but including the field in a request does nothing - (the server *must* ignore it and *must not* throw an error as a result - of the field's presence). - REDUCE_PERCENTILE_50 (int): Signed fractions of a second at nanosecond resolution of the span of - time. Durations less than one second are represented with a 0 - ``seconds`` field and a positive or negative ``nanos`` field. For - durations of one second or more, a non-zero value for the ``nanos`` - field must be of the same sign as the ``seconds`` field. Must be from - -999,999,999 to +999,999,999 inclusive. - REDUCE_PERCENTILE_05 (int): Set true to use the old proto1 MessageSet wire format for - extensions. This is provided for backwards-compatibility with the - MessageSet wire format. You should not use this for any other reason: - It's less efficient, has fewer features, and is more complicated. - - The message must be defined exactly as follows: message Foo { option - message_set_wire_format = true; extensions 4 to max; } Note that the - message cannot have any defined fields; MessageSets only have - extensions. - - All extensions of your type must be singular messages; e.g. they cannot - be int32s, enums, or repeated messages. - - Because this is an option, the above two restrictions are not enforced - by the protocol compiler. + REDUCE_MEAN (int): Reduce by computing the mean value across time series for each + alignment period. This reducer is valid for ``DELTA`` and ``GAUGE`` + metrics with numeric or distribution values. The ``value_type`` of the + output is ``DOUBLE``. + REDUCE_MIN (int): Reduce by computing the minimum value across time series for each + alignment period. This reducer is valid for ``DELTA`` and ``GAUGE`` + metrics with numeric values. The ``value_type`` of the output is the + same as the ``value_type`` of the input. + REDUCE_MAX (int): Reduce by computing the maximum value across time series for each + alignment period. This reducer is valid for ``DELTA`` and ``GAUGE`` + metrics with numeric values. The ``value_type`` of the output is the + same as the ``value_type`` of the input. 
+ REDUCE_SUM (int): Reduce by computing the sum across time series for each alignment + period. This reducer is valid for ``DELTA`` and ``GAUGE`` metrics with + numeric and distribution values. The ``value_type`` of the output is the + same as the ``value_type`` of the input. + REDUCE_STDDEV (int): Reduce by computing the standard deviation across time series for + each alignment period. This reducer is valid for ``DELTA`` and ``GAUGE`` + metrics with numeric or distribution values. The ``value_type`` of the + output is ``DOUBLE``. + REDUCE_COUNT (int): Reduce by computing the number of data points across time series for + each alignment period. This reducer is valid for ``DELTA`` and ``GAUGE`` + metrics of numeric, Boolean, distribution, and string ``value_type``. + The ``value_type`` of the output is ``INT64``. + REDUCE_COUNT_TRUE (int): Reduce by computing the number of ``True``-valued data points across + time series for each alignment period. This reducer is valid for + ``DELTA`` and ``GAUGE`` metrics of Boolean ``value_type``. The + ``value_type`` of the output is ``INT64``. + REDUCE_COUNT_FALSE (int): Reduce by computing the number of ``False``-valued data points + across time series for each alignment period. This reducer is valid for + ``DELTA`` and ``GAUGE`` metrics of Boolean ``value_type``. The + ``value_type`` of the output is ``INT64``. + REDUCE_FRACTION_TRUE (int): Reduce by computing the ratio of the number of ``True``-valued data + points to the total number of data points for each alignment period. + This reducer is valid for ``DELTA`` and ``GAUGE`` metrics of Boolean + ``value_type``. The output value is in the range [0.0, 1.0] and has + ``value_type`` ``DOUBLE``. + REDUCE_PERCENTILE_99 (int): Reduce by computing the `99th + percentile `__ of data points + across time series for each alignment period. This reducer is valid for + ``GAUGE`` and ``DELTA`` metrics of numeric and distribution type. The + value of the output is ``DOUBLE``. + REDUCE_PERCENTILE_95 (int): Reduce by computing the `95th + percentile `__ of data points + across time series for each alignment period. This reducer is valid for + ``GAUGE`` and ``DELTA`` metrics of numeric and distribution type. The + value of the output is ``DOUBLE``. + REDUCE_PERCENTILE_50 (int): Reduce by computing the `50th + percentile `__ of data points + across time series for each alignment period. This reducer is valid for + ``GAUGE`` and ``DELTA`` metrics of numeric and distribution type. The + value of the output is ``DOUBLE``. + REDUCE_PERCENTILE_05 (int): Reduce by computing the `5th + percentile `__ of data points + across time series for each alignment period. This reducer is valid for + ``GAUGE`` and ``DELTA`` metrics of numeric and distribution type. The + value of the output is ``DOUBLE``. """ REDUCE_NONE = 0 @@ -521,10 +271,7 @@ class Mode(enum.IntEnum): Chart mode options. Attributes: - MODE_UNSPECIFIED (int): The data is plotted as a heatmap. The series being plotted must have - a ``DISTRIBUTION`` value type. The value of each bucket in the - distribution is displayed as a color. This type is not currently - available in the Stackdriver Monitoring application. + MODE_UNSPECIFIED (int): Mode is unspecified. The view will default to ``COLOR``. COLOR (int): The chart distinguishes data series using different color. Line colors may get reused when there are many lines in the chart. 
X_RAY (int): The chart uses the Stackdriver x-ray mode, in which each @@ -545,9 +292,10 @@ class Direction(enum.IntEnum): Describes the ranking directions. Attributes: - DIRECTION_UNSPECIFIED (int): Not allowed in well-formed requests. - TOP (int): Pass the highest ranking inputs. - BOTTOM (int): Pass the lowest ranking inputs. + DIRECTION_UNSPECIFIED (int): Not allowed. You must specify a different ``Direction`` if you + specify a ``PickTimeSeriesFilter``. + TOP (int): Pass the highest ``num_time_series`` ranking inputs. + BOTTOM (int): Pass the lowest ``num_time_series`` ranking inputs. """ DIRECTION_UNSPECIFIED = 0 @@ -556,10 +304,12 @@ class Direction(enum.IntEnum): class Method(enum.IntEnum): """ - The value reducers that can be applied to a PickTimeSeriesFilter. + The value reducers that can be applied to a + ``PickTimeSeriesFilter``. Attributes: - METHOD_UNSPECIFIED (int): Not allowed in well-formed requests. + METHOD_UNSPECIFIED (int): Not allowed. You must specify a different ``Method`` if you specify + a ``PickTimeSeriesFilter``. METHOD_MEAN (int): Select the mean of all values. METHOD_MAX (int): Select the maximum value. METHOD_MIN (int): Select the minimum value. @@ -647,10 +397,7 @@ class PlotType(enum.IntEnum): The types of plotting strategies for data sets. Attributes: - PLOT_TYPE_UNSPECIFIED (int): Align time series via aggregation. The resulting data point in the - alignment period is the count of all data points in the period. This - alignment is valid for gauge and delta metrics with numeric or Boolean - values. The value type of the output is ``INT64``. + PLOT_TYPE_UNSPECIFIED (int): Plot type is unspecified. The view will default to ``LINE``. LINE (int): The data is plotted as a set of lines (one line per series). STACKED_AREA (int): The data is plotted as a set of filled areas (one area per series), with the areas stacked vertically (the base of each area is the top of @@ -660,10 +407,10 @@ class PlotType(enum.IntEnum): with the boxes stacked vertically (the base of each box is the top of its predecessor, and the base of the first box is the X axis). Since the boxes do not overlap, each is filled with a different opaque color. - HEATMAP (int): If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this method. - Using this field causes the method to return additional results from the - previous method call. + HEATMAP (int): The data is plotted as a heatmap. The series being plotted must have + a ``DISTRIBUTION`` value type. The value of each bucket in the + distribution is displayed as a color. This type is not currently + available in the Stackdriver Monitoring application. """ PLOT_TYPE_UNSPECIFIED = 0 @@ -678,10 +425,7 @@ class Scale(enum.IntEnum): Types of scales used in axes. Attributes: - SCALE_UNSPECIFIED (int): Align time series via aggregation. The resulting data point in the - alignment period is the standard deviation of all data points in the - period. This alignment is valid for gauge and delta metrics with numeric - values. The value type of the output is ``DOUBLE``. + SCALE_UNSPECIFIED (int): Scale is unspecified. The view will default to ``LINEAR``. LINEAR (int): Linear scale. LOG10 (int): Logarithmic scale (base 10). 
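        Example: taken together, the ``Aligner`` and ``Reducer`` values above
        are the knobs an ``Aggregation`` exposes when a dashboard widget
        queries time series data. The sketch below is illustrative only; the
        import path follows this package's file layout
        (``google/cloud/monitoring_dashboard/v1/proto``) rather than a
        documented public surface, and the grouping label is a hypothetical
        placeholder::

            from google.protobuf import duration_pb2

            from google.cloud.monitoring_dashboard.v1.proto import common_pb2

            aggregation = common_pb2.Aggregation(
                # Divide each input series into 60-second blocks.
                alignment_period=duration_pb2.Duration(seconds=60),
                # Convert cumulative counters to per-second rates (ALIGN_RATE).
                per_series_aligner=common_pb2.Aggregation.ALIGN_RATE,
                # Then average the aligned series together (REDUCE_MEAN)...
                cross_series_reducer=common_pb2.Aggregation.REDUCE_MEAN,
                # ...keeping one output series per value of this label.
                group_by_fields=["resource.label.zone"],
            )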
""" diff --git a/google/cloud/monitoring_dashboard/v1/gapic/transports/dashboards_service_grpc_transport.py b/google/cloud/monitoring_dashboard/v1/gapic/transports/dashboards_service_grpc_transport.py index a306745..9f49ebf 100644 --- a/google/cloud/monitoring_dashboard/v1/gapic/transports/dashboards_service_grpc_transport.py +++ b/google/cloud/monitoring_dashboard/v1/gapic/transports/dashboards_service_grpc_transport.py @@ -58,7 +58,7 @@ def __init__( # exception (channels come with credentials baked in already). if channel is not None and credentials is not None: raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive." + "The `channel` and `credentials` arguments are mutually " "exclusive.", ) # Create the channel. @@ -79,7 +79,7 @@ def __init__( self._stubs = { "dashboards_service_stub": dashboards_service_pb2_grpc.DashboardsServiceStub( channel - ) + ), } @classmethod @@ -118,22 +118,11 @@ def channel(self): def create_dashboard(self): """Return the gRPC stub for :meth:`DashboardsServiceClient.create_dashboard`. - Identifies which part of the FileDescriptorProto was defined at this - location. - - Each element is a field number or an index. They form a path from the - root FileDescriptorProto to the place where the definition. For example, - this path: [ 4, 3, 2, 7, 1 ] refers to: file.message_type(3) // 4, 3 - .field(7) // 2, 7 .name() // 1 This is because - FileDescriptorProto.message_type has field number 4: repeated - DescriptorProto message_type = 4; and DescriptorProto.field has field - number 2: repeated FieldDescriptorProto field = 2; and - FieldDescriptorProto.name has field number 1: optional string name = 1; + Creates a new custom dashboard. - Thus, the above path gives the location of a field name. If we removed - the last element: [ 4, 3, 2, 7 ] this path refers to the whole field - declaration (from the beginning of the label to the terminating - semicolon). + This method requires the ``monitoring.dashboards.create`` permission on + the specified project. For more information, see `Google Cloud + IAM `__. Returns: Callable: A callable which accepts the appropriate @@ -146,8 +135,11 @@ def create_dashboard(self): def list_dashboards(self): """Return the gRPC stub for :meth:`DashboardsServiceClient.list_dashboards`. - An annotation that describes a resource definition without a - corresponding message; see ``ResourceDescriptor``. + Lists the existing dashboards. + + This method requires the ``monitoring.dashboards.list`` permission on + the specified project. For more information, see `Google Cloud + IAM `__. Returns: Callable: A callable which accepts the appropriate @@ -160,19 +152,11 @@ def list_dashboards(self): def get_dashboard(self): """Return the gRPC stub for :meth:`DashboardsServiceClient.get_dashboard`. - Align and convert to a percentage change. This alignment is valid - for gauge and delta metrics with numeric values. This alignment - conceptually computes the equivalent of "((current - - previous)/previous)*100" where previous value is determined based on the - alignmentPeriod. In the event that previous is 0 the calculated value is - infinity with the exception that if both (current - previous) and - previous are 0 the calculated value is 0. A 10 minute moving mean is - computed at each point of the time window prior to the above calculation - to smooth the metric and prevent false positives from very short lived - spikes. Only applicable for data that is >= 0. Any values < 0 are - treated as no data. 
While delta metrics are accepted by this alignment - special care should be taken that the values for the metric will always - be positive. The output is a gauge metric with value type ``DOUBLE``. + Fetches a specific dashboard. + + This method requires the ``monitoring.dashboards.get`` permission on the + specified dashboard. For more information, see `Google Cloud + IAM `__. Returns: Callable: A callable which accepts the appropriate @@ -185,37 +169,11 @@ def get_dashboard(self): def delete_dashboard(self): """Return the gRPC stub for :meth:`DashboardsServiceClient.delete_dashboard`. - Protocol Buffers - Google's data interchange format Copyright 2008 - Google Inc. All rights reserved. - https://developers.google.com/protocol-buffers/ - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are - met: - - :: - - * Redistributions of source code must retain the above copyright - - notice, this list of conditions and the following disclaimer. \* - Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. \* - Neither the name of Google Inc. nor the names of its contributors may be - used to endorse or promote products derived from this software without - specific prior written permission. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS - IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED - TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A - PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER - OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, - EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR - PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF - LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + Deletes an existing custom dashboard. + + This method requires the ``monitoring.dashboards.delete`` permission on + the specified dashboard. For more information, see `Google Cloud + IAM `__. Returns: Callable: A callable which accepts the appropriate @@ -228,9 +186,9 @@ def delete_dashboard(self): def update_dashboard(self): """Return the gRPC stub for :meth:`DashboardsServiceClient.update_dashboard`. - Deletes an existing custom dashboard. + Replaces an existing custom dashboard with a new definition. - This method requires the ``monitoring.dashboards.delete`` permission on + This method requires the ``monitoring.dashboards.update`` permission on the specified dashboard. For more information, see `Google Cloud IAM `__. diff --git a/google/cloud/monitoring_dashboard/v1/proto/common.proto b/google/cloud/monitoring_dashboard/v1/proto/common.proto new file mode 100644 index 0000000..6dedcf0 --- /dev/null +++ b/google/cloud/monitoring_dashboard/v1/proto/common.proto @@ -0,0 +1,449 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/distribution.proto"; +import "google/protobuf/duration.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "CommonProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// Describes how to combine multiple time series to provide a different view of +// the data. Aggregation of time series is done in two steps. First, each time +// series in the set is _aligned_ to the same time interval boundaries, then the +// set of time series is optionally _reduced_ in number. +// +// Alignment consists of applying the `per_series_aligner` operation +// to each time series after its data has been divided into regular +// `alignment_period` time intervals. This process takes _all_ of the data +// points in an alignment period, applies a mathematical transformation such as +// averaging, minimum, maximum, delta, etc., and converts them into a single +// data point per period. +// +// Reduction is when the aligned and transformed time series can optionally be +// combined, reducing the number of time series through similar mathematical +// transformations. Reduction involves applying a `cross_series_reducer` to +// all the time series, optionally sorting the time series into subsets with +// `group_by_fields`, and applying the reducer to each subset. +// +// The raw time series data can contain a huge amount of information from +// multiple sources. Alignment and reduction transforms this mass of data into +// a more manageable and representative collection of data, for example "the +// 95% latency across the average of all tasks in a cluster". This +// representative data can be more easily graphed and comprehended, and the +// individual time series data is still available for later drilldown. For more +// details, see [Filtering and +// aggregation](https://cloud.google.com/monitoring/api/v3/aggregation). +message Aggregation { + // The `Aligner` specifies the operation that will be applied to the data + // points in each alignment period in a time series. Except for + // `ALIGN_NONE`, which specifies that no operation be applied, each alignment + // operation replaces the set of data values in each alignment period with + // a single value: the result of applying the operation to the data values. + // An aligned time series has a single data value at the end of each + // `alignment_period`. + // + // An alignment operation can change the data type of the values, too. For + // example, if you apply a counting operation to boolean values, the data + // `value_type` in the original time series is `BOOLEAN`, but the `value_type` + // in the aligned result is `INT64`. + enum Aligner { + // No alignment. Raw data is returned. Not valid if cross-series reduction + // is requested. The `value_type` of the result is the same as the + // `value_type` of the input. 
+ ALIGN_NONE = 0; + + // Align and convert to + // [DELTA][google.api.MetricDescriptor.MetricKind.DELTA]. + // The output is `delta = y1 - y0`. + // + // This alignment is valid for + // [CUMULATIVE][google.api.MetricDescriptor.MetricKind.CUMULATIVE] and + // `DELTA` metrics. If the selected alignment period results in periods + // with no data, then the aligned value for such a period is created by + // interpolation. The `value_type` of the aligned result is the same as + // the `value_type` of the input. + ALIGN_DELTA = 1; + + // Align and convert to a rate. The result is computed as + // `rate = (y1 - y0)/(t1 - t0)`, or "delta over time". + // Think of this aligner as providing the slope of the line that passes + // through the value at the start and at the end of the `alignment_period`. + // + // This aligner is valid for `CUMULATIVE` + // and `DELTA` metrics with numeric values. If the selected alignment + // period results in periods with no data, then the aligned value for + // such a period is created by interpolation. The output is a `GAUGE` + // metric with `value_type` `DOUBLE`. + // + // If, by "rate", you mean "percentage change", see the + // `ALIGN_PERCENT_CHANGE` aligner instead. + ALIGN_RATE = 2; + + // Align by interpolating between adjacent points around the alignment + // period boundary. This aligner is valid for `GAUGE` metrics with + // numeric values. The `value_type` of the aligned result is the same as the + // `value_type` of the input. + ALIGN_INTERPOLATE = 3; + + // Align by moving the most recent data point before the end of the + // alignment period to the boundary at the end of the alignment + // period. This aligner is valid for `GAUGE` metrics. The `value_type` of + // the aligned result is the same as the `value_type` of the input. + ALIGN_NEXT_OLDER = 4; + + // Align the time series by returning the minimum value in each alignment + // period. This aligner is valid for `GAUGE` and `DELTA` metrics with + // numeric values. The `value_type` of the aligned result is the same as + // the `value_type` of the input. + ALIGN_MIN = 10; + + // Align the time series by returning the maximum value in each alignment + // period. This aligner is valid for `GAUGE` and `DELTA` metrics with + // numeric values. The `value_type` of the aligned result is the same as + // the `value_type` of the input. + ALIGN_MAX = 11; + + // Align the time series by returning the mean value in each alignment + // period. This aligner is valid for `GAUGE` and `DELTA` metrics with + // numeric values. The `value_type` of the aligned result is `DOUBLE`. + ALIGN_MEAN = 12; + + // Align the time series by returning the number of values in each alignment + // period. This aligner is valid for `GAUGE` and `DELTA` metrics with + // numeric or Boolean values. The `value_type` of the aligned result is + // `INT64`. + ALIGN_COUNT = 13; + + // Align the time series by returning the sum of the values in each + // alignment period. This aligner is valid for `GAUGE` and `DELTA` + // metrics with numeric and distribution values. The `value_type` of the + // aligned result is the same as the `value_type` of the input. + ALIGN_SUM = 14; + + // Align the time series by returning the standard deviation of the values + // in each alignment period. This aligner is valid for `GAUGE` and + // `DELTA` metrics with numeric values. The `value_type` of the output is + // `DOUBLE`. + ALIGN_STDDEV = 15; + + // Align the time series by returning the number of `True` values in + // each alignment period. 
This aligner is valid for `GAUGE` metrics with + // Boolean values. The `value_type` of the output is `INT64`. + ALIGN_COUNT_TRUE = 16; + + // Align the time series by returning the number of `False` values in + // each alignment period. This aligner is valid for `GAUGE` metrics with + // Boolean values. The `value_type` of the output is `INT64`. + ALIGN_COUNT_FALSE = 24; + + // Align the time series by returning the ratio of the number of `True` + // values to the total number of values in each alignment period. This + // aligner is valid for `GAUGE` metrics with Boolean values. The output + // value is in the range [0.0, 1.0] and has `value_type` `DOUBLE`. + ALIGN_FRACTION_TRUE = 17; + + // Align the time series by using [percentile + // aggregation](https://en.wikipedia.org/wiki/Percentile). The resulting + // data point in each alignment period is the 99th percentile of all data + // points in the period. This aligner is valid for `GAUGE` and `DELTA` + // metrics with distribution values. The output is a `GAUGE` metric with + // `value_type` `DOUBLE`. + ALIGN_PERCENTILE_99 = 18; + + // Align the time series by using [percentile + // aggregation](https://en.wikipedia.org/wiki/Percentile). The resulting + // data point in each alignment period is the 95th percentile of all data + // points in the period. This aligner is valid for `GAUGE` and `DELTA` + // metrics with distribution values. The output is a `GAUGE` metric with + // `value_type` `DOUBLE`. + ALIGN_PERCENTILE_95 = 19; + + // Align the time series by using [percentile + // aggregation](https://en.wikipedia.org/wiki/Percentile). The resulting + // data point in each alignment period is the 50th percentile of all data + // points in the period. This aligner is valid for `GAUGE` and `DELTA` + // metrics with distribution values. The output is a `GAUGE` metric with + // `value_type` `DOUBLE`. + ALIGN_PERCENTILE_50 = 20; + + // Align the time series by using [percentile + // aggregation](https://en.wikipedia.org/wiki/Percentile). The resulting + // data point in each alignment period is the 5th percentile of all data + // points in the period. This aligner is valid for `GAUGE` and `DELTA` + // metrics with distribution values. The output is a `GAUGE` metric with + // `value_type` `DOUBLE`. + ALIGN_PERCENTILE_05 = 21; + + // Align and convert to a percentage change. This aligner is valid for + // `GAUGE` and `DELTA` metrics with numeric values. This alignment returns + // `((current - previous)/previous) * 100`, where the value of `previous` is + // determined based on the `alignment_period`. + // + // If the values of `current` and `previous` are both 0, then the returned + // value is 0. If only `previous` is 0, the returned value is infinity. + // + // A 10-minute moving mean is computed at each point of the alignment period + // prior to the above calculation to smooth the metric and prevent false + // positives from very short-lived spikes. The moving mean is only + // applicable for data whose values are `>= 0`. Any values `< 0` are + // treated as a missing datapoint, and are ignored. While `DELTA` + // metrics are accepted by this alignment, special care should be taken that + // the values for the metric will always be positive. The output is a + // `GAUGE` metric with `value_type` `DOUBLE`. 
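+ //
+ // For example, with a smoothed `previous = 200` and `current = 230`, the
+ // aligned data point is `((230 - 200) / 200) * 100 = 15`, i.e. a 15%
+ // increase over the `alignment_period`.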
+ ALIGN_PERCENT_CHANGE = 23; + } + + // A Reducer operation describes how to aggregate data points from multiple + // time series into a single time series, where the value of each data point + // in the resulting series is a function of all the already aligned values in + // the input time series. + enum Reducer { + // No cross-time series reduction. The output of the `Aligner` is + // returned. + REDUCE_NONE = 0; + + // Reduce by computing the mean value across time series for each + // alignment period. This reducer is valid for + // [DELTA][google.api.MetricDescriptor.MetricKind.DELTA] and + // [GAUGE][google.api.MetricDescriptor.MetricKind.GAUGE] metrics with + // numeric or distribution values. The `value_type` of the output is + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + REDUCE_MEAN = 1; + + // Reduce by computing the minimum value across time series for each + // alignment period. This reducer is valid for `DELTA` and `GAUGE` metrics + // with numeric values. The `value_type` of the output is the same as the + // `value_type` of the input. + REDUCE_MIN = 2; + + // Reduce by computing the maximum value across time series for each + // alignment period. This reducer is valid for `DELTA` and `GAUGE` metrics + // with numeric values. The `value_type` of the output is the same as the + // `value_type` of the input. + REDUCE_MAX = 3; + + // Reduce by computing the sum across time series for each + // alignment period. This reducer is valid for `DELTA` and `GAUGE` metrics + // with numeric and distribution values. The `value_type` of the output is + // the same as the `value_type` of the input. + REDUCE_SUM = 4; + + // Reduce by computing the standard deviation across time series + // for each alignment period. This reducer is valid for `DELTA` and + // `GAUGE` metrics with numeric or distribution values. The `value_type` + // of the output is `DOUBLE`. + REDUCE_STDDEV = 5; + + // Reduce by computing the number of data points across time series + // for each alignment period. This reducer is valid for `DELTA` and + // `GAUGE` metrics of numeric, Boolean, distribution, and string + // `value_type`. The `value_type` of the output is `INT64`. + REDUCE_COUNT = 6; + + // Reduce by computing the number of `True`-valued data points across time + // series for each alignment period. This reducer is valid for `DELTA` and + // `GAUGE` metrics of Boolean `value_type`. The `value_type` of the output + // is `INT64`. + REDUCE_COUNT_TRUE = 7; + + // Reduce by computing the number of `False`-valued data points across time + // series for each alignment period. This reducer is valid for `DELTA` and + // `GAUGE` metrics of Boolean `value_type`. The `value_type` of the output + // is `INT64`. + REDUCE_COUNT_FALSE = 15; + + // Reduce by computing the ratio of the number of `True`-valued data points + // to the total number of data points for each alignment period. This + // reducer is valid for `DELTA` and `GAUGE` metrics of Boolean `value_type`. + // The output value is in the range [0.0, 1.0] and has `value_type` + // `DOUBLE`. + REDUCE_FRACTION_TRUE = 8; + + // Reduce by computing the [99th + // percentile](https://en.wikipedia.org/wiki/Percentile) of data points + // across time series for each alignment period. This reducer is valid for + // `GAUGE` and `DELTA` metrics of numeric and distribution type. The value + // of the output is `DOUBLE`. 
+ REDUCE_PERCENTILE_99 = 9; + + // Reduce by computing the [95th + // percentile](https://en.wikipedia.org/wiki/Percentile) of data points + // across time series for each alignment period. This reducer is valid for + // `GAUGE` and `DELTA` metrics of numeric and distribution type. The value + // of the output is `DOUBLE`. + REDUCE_PERCENTILE_95 = 10; + + // Reduce by computing the [50th + // percentile](https://en.wikipedia.org/wiki/Percentile) of data points + // across time series for each alignment period. This reducer is valid for + // `GAUGE` and `DELTA` metrics of numeric and distribution type. The value + // of the output is `DOUBLE`. + REDUCE_PERCENTILE_50 = 11; + + // Reduce by computing the [5th + // percentile](https://en.wikipedia.org/wiki/Percentile) of data points + // across time series for each alignment period. This reducer is valid for + // `GAUGE` and `DELTA` metrics of numeric and distribution type. The value + // of the output is `DOUBLE`. + REDUCE_PERCENTILE_05 = 12; + } + + // The `alignment_period` specifies a time interval, in seconds, that is used + // to divide the data in all the + // [time series][google.monitoring.v3.TimeSeries] into consistent blocks of + // time. This will be done before the per-series aligner can be applied to + // the data. + // + // The value must be at least 60 seconds. If a per-series aligner other than + // `ALIGN_NONE` is specified, this field is required or an error is returned. + // If no per-series aligner is specified, or the aligner `ALIGN_NONE` is + // specified, then this field is ignored. + google.protobuf.Duration alignment_period = 1; + + // An `Aligner` describes how to bring the data points in a single + // time series into temporal alignment. Except for `ALIGN_NONE`, all + // alignments cause all the data points in an `alignment_period` to be + // mathematically grouped together, resulting in a single data point for + // each `alignment_period` with end timestamp at the end of the period. + // + // Not all alignment operations may be applied to all time series. The valid + // choices depend on the `metric_kind` and `value_type` of the original time + // series. Alignment can change the `metric_kind` or the `value_type` of + // the time series. + // + // Time series data must be aligned in order to perform cross-time + // series reduction. If `cross_series_reducer` is specified, then + // `per_series_aligner` must be specified and not equal to `ALIGN_NONE` + // and `alignment_period` must be specified; otherwise, an error is + // returned. + Aligner per_series_aligner = 2; + + // The reduction operation to be used to combine time series into a single + // time series, where the value of each data point in the resulting series is + // a function of all the already aligned values in the input time series. + // + // Not all reducer operations can be applied to all time series. The valid + // choices depend on the `metric_kind` and the `value_type` of the original + // time series. Reduction can yield a time series with a different + // `metric_kind` or `value_type` than the input time series. + // + // Time series data must first be aligned (see `per_series_aligner`) in order + // to perform cross-time series reduction. If `cross_series_reducer` is + // specified, then `per_series_aligner` must be specified, and must not be + // `ALIGN_NONE`. An `alignment_period` must also be specified; otherwise, an + // error is returned. 
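+ //
+ // For example, `REDUCE_MEAN` together with `per_series_aligner: ALIGN_RATE`
+ // and a 60-second `alignment_period` is a valid combination, whereas
+ // `REDUCE_MEAN` with `per_series_aligner` unset or set to `ALIGN_NONE`
+ // results in an error.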
+ Reducer cross_series_reducer = 4; + + // The set of fields to preserve when `cross_series_reducer` is + // specified. The `group_by_fields` determine how the time series are + // partitioned into subsets prior to applying the aggregation + // operation. Each subset contains time series that have the same + // value for each of the grouping fields. Each individual time + // series is a member of exactly one subset. The + // `cross_series_reducer` is applied to each subset of time series. + // It is not possible to reduce across different resource types, so + // this field implicitly contains `resource.type`. Fields not + // specified in `group_by_fields` are aggregated away. If + // `group_by_fields` is not specified and all the time series have + // the same resource type, then the time series are aggregated into + // a single output time series. If `cross_series_reducer` is not + // defined, this field is ignored. + repeated string group_by_fields = 5; +} + +// Describes a ranking-based time series filter. Each input time series is +// ranked with an aligner. The filter will allow up to `num_time_series` time +// series to pass through it, selecting them based on the relative ranking. +// +// For example, if `ranking_method` is `METHOD_MEAN`,`direction` is `BOTTOM`, +// and `num_time_series` is 3, then the 3 times series with the lowest mean +// values will pass through the filter. +message PickTimeSeriesFilter { + // The value reducers that can be applied to a `PickTimeSeriesFilter`. + enum Method { + // Not allowed. You must specify a different `Method` if you specify a + // `PickTimeSeriesFilter`. + METHOD_UNSPECIFIED = 0; + + // Select the mean of all values. + METHOD_MEAN = 1; + + // Select the maximum value. + METHOD_MAX = 2; + + // Select the minimum value. + METHOD_MIN = 3; + + // Compute the sum of all values. + METHOD_SUM = 4; + + // Select the most recent value. + METHOD_LATEST = 5; + } + + // Describes the ranking directions. + enum Direction { + // Not allowed. You must specify a different `Direction` if you specify a + // `PickTimeSeriesFilter`. + DIRECTION_UNSPECIFIED = 0; + + // Pass the highest `num_time_series` ranking inputs. + TOP = 1; + + // Pass the lowest `num_time_series` ranking inputs. + BOTTOM = 2; + } + + // `ranking_method` is applied to each time series independently to produce + // the value which will be used to compare the time series to other time + // series. + Method ranking_method = 1; + + // How many time series to allow to pass through the filter. + int32 num_time_series = 2; + + // How to use the ranking to select time series that pass through the filter. + Direction direction = 3; +} + +// A filter that ranks streams based on their statistical relation to other +// streams in a request. +// Note: This field is deprecated and completely ignored by the API. +message StatisticalTimeSeriesFilter { + // The filter methods that can be applied to a stream. + enum Method { + // Not allowed in well-formed requests. + METHOD_UNSPECIFIED = 0; + + // Compute the outlier score of each stream. + METHOD_CLUSTER_OUTLIER = 1; + } + + // `rankingMethod` is applied to a set of time series, and then the produced + // value for each individual time series is used to compare a given time + // series to others. + // These are methods that cannot be applied stream-by-stream, but rather + // require the full context of a request to evaluate time series. + Method ranking_method = 1; + + // How many time series to output. 
+ int32 num_time_series = 2; +} diff --git a/google/cloud/monitoring_dashboard/v1/proto/common_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/common_pb2.py index 8edca94..9d4a467 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/common_pb2.py +++ b/google/cloud/monitoring_dashboard/v1/proto/common_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,8 @@ # limitations under the License. # Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring/dashboard_v1/proto/common.proto +# source: google/cloud/monitoring_dashboard_v1/proto/common.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -30,20 +27,21 @@ _sym_db = _symbol_database.Default() +from google.api import distribution_pb2 as google_dot_api_dot_distribution__pb2 from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring/dashboard_v1/proto/common.proto", + name="google/cloud/monitoring_dashboard_v1/proto/common.proto", package="google.monitoring.dashboard.v1", syntax="proto3", - serialized_options=_b( - '\n"com.google.monitoring.dashboard.v1B\013CommonProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard' - ), - serialized_pb=_b( - '\n7google/cloud/monitoring/dashboard_v1/proto/common.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1egoogle/protobuf/duration.proto"\xc1\x07\n\x0b\x41ggregation\x12\x33\n\x10\x61lignment_period\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12O\n\x12per_series_aligner\x18\x02 \x01(\x0e\x32\x33.google.monitoring.dashboard.v1.Aggregation.Aligner\x12Q\n\x14\x63ross_series_reducer\x18\x04 \x01(\x0e\x32\x33.google.monitoring.dashboard.v1.Aggregation.Reducer\x12\x17\n\x0fgroup_by_fields\x18\x05 \x03(\t"\x8b\x03\n\x07\x41ligner\x12\x0e\n\nALIGN_NONE\x10\x00\x12\x0f\n\x0b\x41LIGN_DELTA\x10\x01\x12\x0e\n\nALIGN_RATE\x10\x02\x12\x15\n\x11\x41LIGN_INTERPOLATE\x10\x03\x12\x14\n\x10\x41LIGN_NEXT_OLDER\x10\x04\x12\r\n\tALIGN_MIN\x10\n\x12\r\n\tALIGN_MAX\x10\x0b\x12\x0e\n\nALIGN_MEAN\x10\x0c\x12\x0f\n\x0b\x41LIGN_COUNT\x10\r\x12\r\n\tALIGN_SUM\x10\x0e\x12\x10\n\x0c\x41LIGN_STDDEV\x10\x0f\x12\x14\n\x10\x41LIGN_COUNT_TRUE\x10\x10\x12\x15\n\x11\x41LIGN_COUNT_FALSE\x10\x18\x12\x17\n\x13\x41LIGN_FRACTION_TRUE\x10\x11\x12\x17\n\x13\x41LIGN_PERCENTILE_99\x10\x12\x12\x17\n\x13\x41LIGN_PERCENTILE_95\x10\x13\x12\x17\n\x13\x41LIGN_PERCENTILE_50\x10\x14\x12\x17\n\x13\x41LIGN_PERCENTILE_05\x10\x15\x12\x18\n\x14\x41LIGN_PERCENT_CHANGE\x10\x17"\xb1\x02\n\x07Reducer\x12\x0f\n\x0bREDUCE_NONE\x10\x00\x12\x0f\n\x0bREDUCE_MEAN\x10\x01\x12\x0e\n\nREDUCE_MIN\x10\x02\x12\x0e\n\nREDUCE_MAX\x10\x03\x12\x0e\n\nREDUCE_SUM\x10\x04\x12\x11\n\rREDUCE_STDDEV\x10\x05\x12\x10\n\x0cREDUCE_COUNT\x10\x06\x12\x15\n\x11REDUCE_COUNT_TRUE\x10\x07\x12\x16\n\x12REDUCE_COUNT_FALSE\x10\x0f\x12\x18\n\x14REDUCE_FRACTION_TRUE\x10\x08\x12\x18\n\x14REDUCE_PERCENTILE_99\x10\t\x12\x18\n\x14REDUCE_PERCENTILE_95\x10\n\x12\x18\n\x14REDUCE_PERCENTILE_50\x10\x0b\x12\x18\n\x14REDUCE_PERCENTILE_05\x10\x0c"\x8a\x03\n\x14PickTimeSeriesFilter\x12S\n\x0eranking_method\x18\x01 
\x01(\x0e\x32;.google.monitoring.dashboard.v1.PickTimeSeriesFilter.Method\x12\x17\n\x0fnum_time_series\x18\x02 \x01(\x05\x12Q\n\tdirection\x18\x03 \x01(\x0e\x32>.google.monitoring.dashboard.v1.PickTimeSeriesFilter.Direction"t\n\x06Method\x12\x16\n\x12METHOD_UNSPECIFIED\x10\x00\x12\x0f\n\x0bMETHOD_MEAN\x10\x01\x12\x0e\n\nMETHOD_MAX\x10\x02\x12\x0e\n\nMETHOD_MIN\x10\x03\x12\x0e\n\nMETHOD_SUM\x10\x04\x12\x11\n\rMETHOD_LATEST\x10\x05";\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\x07\n\x03TOP\x10\x01\x12\n\n\x06\x42OTTOM\x10\x02"\xd0\x01\n\x1bStatisticalTimeSeriesFilter\x12Z\n\x0eranking_method\x18\x01 \x01(\x0e\x32\x42.google.monitoring.dashboard.v1.StatisticalTimeSeriesFilter.Method\x12\x17\n\x0fnum_time_series\x18\x02 \x01(\x05"<\n\x06Method\x12\x16\n\x12METHOD_UNSPECIFIED\x10\x00\x12\x1a\n\x16METHOD_CLUSTER_OUTLIER\x10\x01\x42|\n"com.google.monitoring.dashboard.v1B\x0b\x43ommonProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboardb\x06proto3' - ), - dependencies=[google_dot_protobuf_dot_duration__pb2.DESCRIPTOR], + serialized_options=b'\n"com.google.monitoring.dashboard.v1B\013CommonProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n7google/cloud/monitoring_dashboard_v1/proto/common.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1dgoogle/api/distribution.proto\x1a\x1egoogle/protobuf/duration.proto"\xc1\x07\n\x0b\x41ggregation\x12\x33\n\x10\x61lignment_period\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12O\n\x12per_series_aligner\x18\x02 \x01(\x0e\x32\x33.google.monitoring.dashboard.v1.Aggregation.Aligner\x12Q\n\x14\x63ross_series_reducer\x18\x04 \x01(\x0e\x32\x33.google.monitoring.dashboard.v1.Aggregation.Reducer\x12\x17\n\x0fgroup_by_fields\x18\x05 \x03(\t"\x8b\x03\n\x07\x41ligner\x12\x0e\n\nALIGN_NONE\x10\x00\x12\x0f\n\x0b\x41LIGN_DELTA\x10\x01\x12\x0e\n\nALIGN_RATE\x10\x02\x12\x15\n\x11\x41LIGN_INTERPOLATE\x10\x03\x12\x14\n\x10\x41LIGN_NEXT_OLDER\x10\x04\x12\r\n\tALIGN_MIN\x10\n\x12\r\n\tALIGN_MAX\x10\x0b\x12\x0e\n\nALIGN_MEAN\x10\x0c\x12\x0f\n\x0b\x41LIGN_COUNT\x10\r\x12\r\n\tALIGN_SUM\x10\x0e\x12\x10\n\x0c\x41LIGN_STDDEV\x10\x0f\x12\x14\n\x10\x41LIGN_COUNT_TRUE\x10\x10\x12\x15\n\x11\x41LIGN_COUNT_FALSE\x10\x18\x12\x17\n\x13\x41LIGN_FRACTION_TRUE\x10\x11\x12\x17\n\x13\x41LIGN_PERCENTILE_99\x10\x12\x12\x17\n\x13\x41LIGN_PERCENTILE_95\x10\x13\x12\x17\n\x13\x41LIGN_PERCENTILE_50\x10\x14\x12\x17\n\x13\x41LIGN_PERCENTILE_05\x10\x15\x12\x18\n\x14\x41LIGN_PERCENT_CHANGE\x10\x17"\xb1\x02\n\x07Reducer\x12\x0f\n\x0bREDUCE_NONE\x10\x00\x12\x0f\n\x0bREDUCE_MEAN\x10\x01\x12\x0e\n\nREDUCE_MIN\x10\x02\x12\x0e\n\nREDUCE_MAX\x10\x03\x12\x0e\n\nREDUCE_SUM\x10\x04\x12\x11\n\rREDUCE_STDDEV\x10\x05\x12\x10\n\x0cREDUCE_COUNT\x10\x06\x12\x15\n\x11REDUCE_COUNT_TRUE\x10\x07\x12\x16\n\x12REDUCE_COUNT_FALSE\x10\x0f\x12\x18\n\x14REDUCE_FRACTION_TRUE\x10\x08\x12\x18\n\x14REDUCE_PERCENTILE_99\x10\t\x12\x18\n\x14REDUCE_PERCENTILE_95\x10\n\x12\x18\n\x14REDUCE_PERCENTILE_50\x10\x0b\x12\x18\n\x14REDUCE_PERCENTILE_05\x10\x0c"\x8a\x03\n\x14PickTimeSeriesFilter\x12S\n\x0eranking_method\x18\x01 \x01(\x0e\x32;.google.monitoring.dashboard.v1.PickTimeSeriesFilter.Method\x12\x17\n\x0fnum_time_series\x18\x02 \x01(\x05\x12Q\n\tdirection\x18\x03 
\x01(\x0e\x32>.google.monitoring.dashboard.v1.PickTimeSeriesFilter.Direction"t\n\x06Method\x12\x16\n\x12METHOD_UNSPECIFIED\x10\x00\x12\x0f\n\x0bMETHOD_MEAN\x10\x01\x12\x0e\n\nMETHOD_MAX\x10\x02\x12\x0e\n\nMETHOD_MIN\x10\x03\x12\x0e\n\nMETHOD_SUM\x10\x04\x12\x11\n\rMETHOD_LATEST\x10\x05";\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\x07\n\x03TOP\x10\x01\x12\n\n\x06\x42OTTOM\x10\x02"\xd0\x01\n\x1bStatisticalTimeSeriesFilter\x12Z\n\x0eranking_method\x18\x01 \x01(\x0e\x32\x42.google.monitoring.dashboard.v1.StatisticalTimeSeriesFilter.Method\x12\x17\n\x0fnum_time_series\x18\x02 \x01(\x05"<\n\x06Method\x12\x16\n\x12METHOD_UNSPECIFIED\x10\x00\x12\x1a\n\x16METHOD_CLUSTER_OUTLIER\x10\x01\x42\xa7\x01\n"com.google.monitoring.dashboard.v1B\x0b\x43ommonProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', + dependencies=[ + google_dot_api_dot_distribution__pb2.DESCRIPTOR, + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, + ], ) @@ -52,15 +50,31 @@ full_name="google.monitoring.dashboard.v1.Aggregation.Aligner", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( - name="ALIGN_NONE", index=0, number=0, serialized_options=None, type=None + name="ALIGN_NONE", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="ALIGN_DELTA", index=1, number=1, serialized_options=None, type=None + name="ALIGN_DELTA", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="ALIGN_RATE", index=2, number=2, serialized_options=None, type=None + name="ALIGN_RATE", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="ALIGN_INTERPOLATE", @@ -68,6 +82,7 @@ number=3, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="ALIGN_NEXT_OLDER", @@ -75,24 +90,55 @@ number=4, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="ALIGN_MIN", index=5, number=10, serialized_options=None, type=None + name="ALIGN_MIN", + index=5, + number=10, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="ALIGN_MAX", index=6, number=11, serialized_options=None, type=None + name="ALIGN_MAX", + index=6, + number=11, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="ALIGN_MEAN", index=7, number=12, serialized_options=None, type=None + name="ALIGN_MEAN", + index=7, + number=12, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="ALIGN_COUNT", index=8, number=13, serialized_options=None, type=None + name="ALIGN_COUNT", + index=8, + number=13, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="ALIGN_SUM", index=9, number=14, serialized_options=None, type=None + name="ALIGN_SUM", + index=9, + number=14, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - 
name="ALIGN_STDDEV", index=10, number=15, serialized_options=None, type=None + name="ALIGN_STDDEV", + index=10, + number=15, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="ALIGN_COUNT_TRUE", @@ -100,6 +146,7 @@ number=16, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="ALIGN_COUNT_FALSE", @@ -107,6 +154,7 @@ number=24, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="ALIGN_FRACTION_TRUE", @@ -114,6 +162,7 @@ number=17, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="ALIGN_PERCENTILE_99", @@ -121,6 +170,7 @@ number=18, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="ALIGN_PERCENTILE_95", @@ -128,6 +178,7 @@ number=19, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="ALIGN_PERCENTILE_50", @@ -135,6 +186,7 @@ number=20, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="ALIGN_PERCENTILE_05", @@ -142,6 +194,7 @@ number=21, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="ALIGN_PERCENT_CHANGE", @@ -149,12 +202,13 @@ number=23, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, serialized_options=None, - serialized_start=382, - serialized_end=777, + serialized_start=413, + serialized_end=808, ) _sym_db.RegisterEnumDescriptor(_AGGREGATION_ALIGNER) @@ -163,27 +217,63 @@ full_name="google.monitoring.dashboard.v1.Aggregation.Reducer", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( - name="REDUCE_NONE", index=0, number=0, serialized_options=None, type=None + name="REDUCE_NONE", + index=0, + number=0, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="REDUCE_MEAN", index=1, number=1, serialized_options=None, type=None + name="REDUCE_MEAN", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="REDUCE_MIN", index=2, number=2, serialized_options=None, type=None + name="REDUCE_MIN", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="REDUCE_MAX", index=3, number=3, serialized_options=None, type=None + name="REDUCE_MAX", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="REDUCE_SUM", index=4, number=4, serialized_options=None, type=None + name="REDUCE_SUM", + index=4, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="REDUCE_STDDEV", index=5, number=5, serialized_options=None, type=None + name="REDUCE_STDDEV", + index=5, + number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="REDUCE_COUNT", index=6, number=6, 
serialized_options=None, type=None + name="REDUCE_COUNT", + index=6, + number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="REDUCE_COUNT_TRUE", @@ -191,6 +281,7 @@ number=7, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="REDUCE_COUNT_FALSE", @@ -198,6 +289,7 @@ number=15, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="REDUCE_FRACTION_TRUE", @@ -205,6 +297,7 @@ number=8, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="REDUCE_PERCENTILE_99", @@ -212,6 +305,7 @@ number=9, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="REDUCE_PERCENTILE_95", @@ -219,6 +313,7 @@ number=10, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="REDUCE_PERCENTILE_50", @@ -226,6 +321,7 @@ number=11, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="REDUCE_PERCENTILE_05", @@ -233,12 +329,13 @@ number=12, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, serialized_options=None, - serialized_start=780, - serialized_end=1085, + serialized_start=811, + serialized_end=1116, ) _sym_db.RegisterEnumDescriptor(_AGGREGATION_REDUCER) @@ -247,6 +344,7 @@ full_name="google.monitoring.dashboard.v1.PickTimeSeriesFilter.Method", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="METHOD_UNSPECIFIED", @@ -254,27 +352,53 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="METHOD_MEAN", index=1, number=1, serialized_options=None, type=None + name="METHOD_MEAN", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="METHOD_MAX", index=2, number=2, serialized_options=None, type=None + name="METHOD_MAX", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="METHOD_MIN", index=3, number=3, serialized_options=None, type=None + name="METHOD_MIN", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="METHOD_SUM", index=4, number=4, serialized_options=None, type=None + name="METHOD_SUM", + index=4, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="METHOD_LATEST", index=5, number=5, serialized_options=None, type=None + name="METHOD_LATEST", + index=5, + number=5, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, serialized_options=None, - serialized_start=1305, - serialized_end=1421, + serialized_start=1336, + serialized_end=1452, ) _sym_db.RegisterEnumDescriptor(_PICKTIMESERIESFILTER_METHOD) @@ -283,6 +407,7 @@ full_name="google.monitoring.dashboard.v1.PickTimeSeriesFilter.Direction", filename=None, file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="DIRECTION_UNSPECIFIED", @@ -290,18 +415,29 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="TOP", index=1, number=1, serialized_options=None, type=None + name="TOP", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="BOTTOM", index=2, number=2, serialized_options=None, type=None + name="BOTTOM", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, serialized_options=None, - serialized_start=1423, - serialized_end=1482, + serialized_start=1454, + serialized_end=1513, ) _sym_db.RegisterEnumDescriptor(_PICKTIMESERIESFILTER_DIRECTION) @@ -310,6 +446,7 @@ full_name="google.monitoring.dashboard.v1.StatisticalTimeSeriesFilter.Method", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="METHOD_UNSPECIFIED", @@ -317,6 +454,7 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( name="METHOD_CLUSTER_OUTLIER", @@ -324,12 +462,13 @@ number=1, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, serialized_options=None, - serialized_start=1633, - serialized_end=1693, + serialized_start=1664, + serialized_end=1724, ) _sym_db.RegisterEnumDescriptor(_STATISTICALTIMESERIESFILTER_METHOD) @@ -340,6 +479,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="alignment_period", @@ -358,6 +498,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="per_series_aligner", @@ -376,6 +517,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="cross_series_reducer", @@ -394,6 +536,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="group_by_fields", @@ -412,18 +555,19 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], - enum_types=[_AGGREGATION_ALIGNER, _AGGREGATION_REDUCER], + enum_types=[_AGGREGATION_ALIGNER, _AGGREGATION_REDUCER,], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=124, - serialized_end=1085, + serialized_start=155, + serialized_end=1116, ) @@ -433,6 +577,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="ranking_method", @@ -451,6 +596,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="num_time_series", @@ -469,6 +615,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="direction", @@ -487,18 +634,19 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], - enum_types=[_PICKTIMESERIESFILTER_METHOD, _PICKTIMESERIESFILTER_DIRECTION], + enum_types=[_PICKTIMESERIESFILTER_METHOD, _PICKTIMESERIESFILTER_DIRECTION,], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1088, - serialized_end=1482, + serialized_start=1119, + serialized_end=1513, ) @@ -508,6 +656,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="ranking_method", @@ -526,6 +675,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="num_time_series", @@ -544,18 +694,19 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], - enum_types=[_STATISTICALTIMESERIESFILTER_METHOD], + enum_types=[_STATISTICALTIMESERIESFILTER_METHOD,], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1485, - serialized_end=1693, + serialized_start=1516, + serialized_end=1724, ) _AGGREGATION.fields_by_name[ @@ -589,63 +740,91 @@ (_message.Message,), { "DESCRIPTOR": _AGGREGATION, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.common_pb2", - "__doc__": """Describes how to combine multiple time series to provide - different views of the data. Aggregation consists of an alignment step - on individual time series (``alignment_period`` and - ``per_series_aligner``) followed by an optional reduction step of the - data across the aligned time series (``cross_series_reducer`` and - ``group_by_fields``). For more details, see - `Aggregation `__. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.common_pb2", + "__doc__": """Describes how to combine multiple time series to provide a different + view of the data. Aggregation of time series is done in two steps. + First, each time series in the set is *aligned* to the same time + interval boundaries, then the set of time series is optionally + *reduced* in number. Alignment consists of applying the + ``per_series_aligner`` operation to each time series after its data + has been divided into regular ``alignment_period`` time intervals. + This process takes *all* of the data points in an alignment period, + applies a mathematical transformation such as averaging, minimum, + maximum, delta, etc., and converts them into a single data point per + period. Reduction is when the aligned and transformed time series can + optionally be combined, reducing the number of time series through + similar mathematical transformations. Reduction involves applying a + ``cross_series_reducer`` to all the time series, optionally sorting + the time series into subsets with ``group_by_fields``, and applying + the reducer to each subset. The raw time series data can contain a + huge amount of information from multiple sources. Alignment and + reduction transforms this mass of data into a more manageable and + representative collection of data, for example “the 95% latency across + the average of all tasks in a cluster”. This representative data can + be more easily graphed and comprehended, and the individual time + series data is still available for later drilldown. For more details, + see `Filtering and aggregation + `__. 
Attributes: alignment_period: - The alignment period for per-[time series][TimeSeries] - alignment. If present, ``alignmentPeriod`` must be at least 60 - seconds. After per-time series alignment, each time series - will contain data points only on the period boundaries. If - ``perSeriesAligner`` is not specified or equals - ``ALIGN_NONE``, then this field is ignored. If - ``perSeriesAligner`` is specified and does not equal - ``ALIGN_NONE``, then this field must be defined; otherwise an - error is returned. + The ``alignment_period`` specifies a time interval, in + seconds, that is used to divide the data in all the [time + series][google.monitoring.v3.TimeSeries] into consistent + blocks of time. This will be done before the per-series + aligner can be applied to the data. The value must be at + least 60 seconds. If a per-series aligner other than + ``ALIGN_NONE`` is specified, this field is required or an + error is returned. If no per-series aligner is specified, or + the aligner ``ALIGN_NONE`` is specified, then this field is + ignored. per_series_aligner: - The approach to be used to align individual time series. Not - all alignment functions may be applied to all time series, - depending on the metric type and value type of the original - time series. Alignment may change the metric type or the value - type of the time series. Time series data must be aligned in - order to perform cross-time series reduction. If - ``crossSeriesReducer`` is specified, then ``perSeriesAligner`` - must be specified and not equal ``ALIGN_NONE`` and - ``alignmentPeriod`` must be specified; otherwise, an error is - returned. + An ``Aligner`` describes how to bring the data points in a + single time series into temporal alignment. Except for + ``ALIGN_NONE``, all alignments cause all the data points in an + ``alignment_period`` to be mathematically grouped together, + resulting in a single data point for each ``alignment_period`` + with end timestamp at the end of the period. Not all + alignment operations may be applied to all time series. The + valid choices depend on the ``metric_kind`` and ``value_type`` + of the original time series. Alignment can change the + ``metric_kind`` or the ``value_type`` of the time series. + Time series data must be aligned in order to perform cross- + time series reduction. If ``cross_series_reducer`` is + specified, then ``per_series_aligner`` must be specified and + not equal to ``ALIGN_NONE`` and ``alignment_period`` must be + specified; otherwise, an error is returned. cross_series_reducer: - The approach to be used to combine time series. Not all - reducer functions may be applied to all time series, depending - on the metric type and the value type of the original time - series. Reduction may change the metric type of value type of - the time series. Time series data must be aligned in order to - perform cross-time series reduction. If ``crossSeriesReducer`` - is specified, then ``perSeriesAligner`` must be specified and - not equal ``ALIGN_NONE`` and ``alignmentPeriod`` must be + The reduction operation to be used to combine time series into + a single time series, where the value of each data point in + the resulting series is a function of all the already aligned + values in the input time series. Not all reducer operations + can be applied to all time series. The valid choices depend on + the ``metric_kind`` and the ``value_type`` of the original + time series. 
Reduction can yield a time series with a + different ``metric_kind`` or ``value_type`` than the input + time series. Time series data must first be aligned (see + ``per_series_aligner``) in order to perform cross-time series + reduction. If ``cross_series_reducer`` is specified, then + ``per_series_aligner`` must be specified, and must not be + ``ALIGN_NONE``. An ``alignment_period`` must also be specified; otherwise, an error is returned. group_by_fields: - The set of fields to preserve when ``crossSeriesReducer`` is - specified. The ``groupByFields`` determine how the time series - are partitioned into subsets prior to applying the aggregation - function. Each subset contains time series that have the same - value for each of the grouping fields. Each individual time - series is a member of exactly one subset. The - ``crossSeriesReducer`` is applied to each subset of time + The set of fields to preserve when ``cross_series_reducer`` is + specified. The ``group_by_fields`` determine how the time + series are partitioned into subsets prior to applying the + aggregation operation. Each subset contains time series that + have the same value for each of the grouping fields. Each + individual time series is a member of exactly one subset. The + ``cross_series_reducer`` is applied to each subset of time series. It is not possible to reduce across different resource types, so this field implicitly contains ``resource.type``. - Fields not specified in ``groupByFields`` are aggregated away. - If ``groupByFields`` is not specified and all the time series - have the same resource type, then the time series are + Fields not specified in ``group_by_fields`` are aggregated + away. If ``group_by_fields`` is not specified and all the time + series have the same resource type, then the time series are aggregated into a single output time series. If - ``crossSeriesReducer`` is not defined, this field is ignored. + ``cross_series_reducer`` is not defined, this field is + ignored. """, # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.Aggregation) }, @@ -657,20 +836,22 @@ (_message.Message,), { "DESCRIPTOR": _PICKTIMESERIESFILTER, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.common_pb2", - "__doc__": """Describes a ranking-based time series filter. Each input - time series is ranked with an aligner. The filter lets through up to - ``num_time_series`` time series, selecting them based on the relative - ranking. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.common_pb2", + "__doc__": """Describes a ranking-based time series filter. Each input time series + is ranked with an aligner. The filter will allow up to + ``num_time_series`` time series to pass through it, selecting them + based on the relative ranking. For example, if ``ranking_method`` is + ``METHOD_MEAN``,\ ``direction`` is ``BOTTOM``, and ``num_time_series`` + is 3, then the 3 times series with the lowest mean values will pass + through the filter. Attributes: ranking_method: - \ ``rankingMethod`` is applied to each time series + \ ``ranking_method`` is applied to each time series independently to produce the value which will be used to compare the time series to other time series. num_time_series: - How many time series to return. + How many time series to allow to pass through the filter. direction: How to use the ranking to select time series that pass through the filter. 
@@ -685,10 +866,10 @@ (_message.Message,), { "DESCRIPTOR": _STATISTICALTIMESERIESFILTER, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.common_pb2", - "__doc__": """A filter that ranks streams based on their statistical - relation to other streams in a request. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.common_pb2", + "__doc__": """A filter that ranks streams based on their statistical relation to + other streams in a request. Note: This field is deprecated and + completely ignored by the API. Attributes: ranking_method: diff --git a/google/cloud/monitoring_dashboard/v1/proto/common_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/common_pb2_grpc.py index 7343170..b662812 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/common_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard/v1/proto/common_pb2_grpc.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/monitoring_dashboard/v1/proto/dashboard.proto b/google/cloud/monitoring_dashboard/v1/proto/dashboard.proto new file mode 100644 index 0000000..7a25776 --- /dev/null +++ b/google/cloud/monitoring_dashboard/v1/proto/dashboard.proto @@ -0,0 +1,66 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/monitoring/dashboard/v1/layouts.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "DashboardsProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// A Google Stackdriver dashboard. Dashboards define the content and layout +// of pages in the Stackdriver web application. +message Dashboard { + option (google.api.resource) = { + type: "monitoring.googleapis.com/Dashboard" + pattern: "projects/{project}/dashboards/{dashboard}" + }; + + // Immutable. The resource name of the dashboard. + string name = 1 [(google.api.field_behavior) = IMMUTABLE]; + + // Required. The mutable, human-readable name. + string display_name = 2 [(google.api.field_behavior) = REQUIRED]; + + // `etag` is used for optimistic concurrency control as a way to help + // prevent simultaneous updates of a policy from overwriting each other. + // An `etag` is returned in the response to `GetDashboard`, and + // users are expected to put that etag in the request to `UpdateDashboard` to + // ensure that their change will be applied to the same version of the + // Dashboard configuration. The field should not be passed during + // dashboard creation. + string etag = 4; + + // A dashboard's root container element that defines the layout style. 
+ oneof layout { + // Content is arranged with a basic layout that re-flows a simple list of + // informational elements like widgets or tiles. + GridLayout grid_layout = 5; + + // The content is divided into equally spaced rows and the widgets are + // arranged horizontally. + RowLayout row_layout = 8; + + // The content is divided into equally spaced columns and the widgets are + // arranged vertically. + ColumnLayout column_layout = 9; + } +} diff --git a/google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2.py index 58b0324..b137429 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2.py +++ b/google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,8 @@ # limitations under the License. # Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring/dashboard_v1/proto/dashboard.proto +# source: google/cloud/monitoring_dashboard_v1/proto/dashboard.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -30,23 +27,24 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.monitoring_dashboard.v1.proto import ( - layouts_pb2 as google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_layouts__pb2, + layouts_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_layouts__pb2, ) DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring/dashboard_v1/proto/dashboard.proto", + name="google/cloud/monitoring_dashboard_v1/proto/dashboard.proto", package="google.monitoring.dashboard.v1", syntax="proto3", - serialized_options=_b( - '\n"com.google.monitoring.dashboard.v1B\017DashboardsProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard' - ), - serialized_pb=_b( - '\n:google/cloud/monitoring/dashboard_v1/proto/dashboard.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x38google/cloud/monitoring/dashboard_v1/proto/layouts.proto"\x92\x02\n\tDashboard\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x14\n\x0c\x64isplay_name\x18\x02 \x01(\t\x12\x0c\n\x04\x65tag\x18\x04 \x01(\t\x12\x41\n\x0bgrid_layout\x18\x05 \x01(\x0b\x32*.google.monitoring.dashboard.v1.GridLayoutH\x00\x12?\n\nrow_layout\x18\x08 \x01(\x0b\x32).google.monitoring.dashboard.v1.RowLayoutH\x00\x12\x45\n\rcolumn_layout\x18\t \x01(\x0b\x32,.google.monitoring.dashboard.v1.ColumnLayoutH\x00\x42\x08\n\x06layoutB\x80\x01\n"com.google.monitoring.dashboard.v1B\x0f\x44\x61shboardsProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboardb\x06proto3' - ), + serialized_options=b'\n"com.google.monitoring.dashboard.v1B\017DashboardsProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', + create_key=_descriptor._internal_create_key, + 
serialized_pb=b'\n:google/cloud/monitoring_dashboard_v1/proto/dashboard.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x38google/cloud/monitoring_dashboard_v1/proto/layouts.proto"\xf1\x02\n\tDashboard\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x05\x12\x19\n\x0c\x64isplay_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x0c\n\x04\x65tag\x18\x04 \x01(\t\x12\x41\n\x0bgrid_layout\x18\x05 \x01(\x0b\x32*.google.monitoring.dashboard.v1.GridLayoutH\x00\x12?\n\nrow_layout\x18\x08 \x01(\x0b\x32).google.monitoring.dashboard.v1.RowLayoutH\x00\x12\x45\n\rcolumn_layout\x18\t \x01(\x0b\x32,.google.monitoring.dashboard.v1.ColumnLayoutH\x00:S\xea\x41P\n#monitoring.googleapis.com/Dashboard\x12)projects/{project}/dashboards/{dashboard}B\x08\n\x06layoutB\xab\x01\n"com.google.monitoring.dashboard.v1B\x0f\x44\x61shboardsProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', dependencies=[ - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_layouts__pb2.DESCRIPTOR + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_layouts__pb2.DESCRIPTOR, ], ) @@ -57,6 +55,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -67,14 +66,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=b"\340A\005", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="display_name", @@ -85,14 +85,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="etag", @@ -103,7 +104,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -111,6 +112,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="grid_layout", @@ -129,6 +131,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="row_layout", @@ -147,6 +150,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="column_layout", @@ -165,12 +169,13 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], enum_types=[], - serialized_options=None, + serialized_options=b"\352AP\n#monitoring.googleapis.com/Dashboard\022)projects/{project}/dashboards/{dashboard}", is_extendable=False, syntax="proto3", extension_ranges=[], @@ -180,27 +185,28 @@ full_name="google.monitoring.dashboard.v1.Dashboard.layout", 
index=0, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[], - ) + ), ], - serialized_start=153, - serialized_end=427, + serialized_start=213, + serialized_end=582, ) _DASHBOARD.fields_by_name[ "grid_layout" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_layouts__pb2._GRIDLAYOUT + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_layouts__pb2._GRIDLAYOUT ) _DASHBOARD.fields_by_name[ "row_layout" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_layouts__pb2._ROWLAYOUT + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_layouts__pb2._ROWLAYOUT ) _DASHBOARD.fields_by_name[ "column_layout" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_layouts__pb2._COLUMNLAYOUT + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_layouts__pb2._COLUMNLAYOUT ) _DASHBOARD.oneofs_by_name["layout"].fields.append( _DASHBOARD.fields_by_name["grid_layout"] @@ -228,16 +234,15 @@ (_message.Message,), { "DESCRIPTOR": _DASHBOARD, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.dashboard_pb2", - "__doc__": """A Google Stackdriver dashboard. Dashboards define the - content and layout of pages in the Stackdriver web application. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.dashboard_pb2", + "__doc__": """A Google Stackdriver dashboard. Dashboards define the content and + layout of pages in the Stackdriver web application. Attributes: name: - The resource name of the dashboard. + Immutable. The resource name of the dashboard. display_name: - The mutable, human-readable name. + Required. The mutable, human-readable name. etag: \ ``etag`` is used for optimistic concurrency control as a way to help prevent simultaneous updates of a policy from @@ -267,4 +272,7 @@ DESCRIPTOR._options = None +_DASHBOARD.fields_by_name["name"]._options = None +_DASHBOARD.fields_by_name["display_name"]._options = None +_DASHBOARD._options = None # @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2_grpc.py index 7343170..b662812 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2_grpc.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/monitoring_dashboard/v1/proto/dashboards_service.proto b/google/cloud/monitoring_dashboard/v1/proto/dashboards_service.proto new file mode 100644 index 0000000..a7cbef5 --- /dev/null +++ b/google/cloud/monitoring_dashboard/v1/proto/dashboards_service.proto @@ -0,0 +1,179 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
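The new ``dashboard.proto`` and regenerated ``dashboard_pb2.py`` above define a ``Dashboard`` with an immutable ``name``, a required ``display_name``, an ``etag`` for optimistic concurrency control, and a ``layout`` oneof. A hedged sketch of constructing one follows; ``GridLayout`` comes from the ``layouts_pb2`` module this patch also regenerates, and no layout-specific fields are assumed.

from google.cloud.monitoring_dashboard.v1.proto import dashboard_pb2, layouts_pb2

dashboard = dashboard_pb2.Dashboard(
    display_name="API error rates",        # Required, mutable, human-readable.
    grid_layout=layouts_pb2.GridLayout(),  # Selects one member of the `layout` oneof.
)
# Per the field comments above, `name` is immutable and `etag` should not be
# passed during dashboard creation, so neither is set here.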
+ +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/monitoring/dashboard/v1/dashboard.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; +import "google/api/client.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "DashboardsServiceProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// Manages Stackdriver dashboards. A dashboard is an arrangement of data display +// widgets in a specific layout. +service DashboardsService { + option (google.api.default_host) = "monitoring.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/monitoring," + "https://www.googleapis.com/auth/monitoring.read," + "https://www.googleapis.com/auth/monitoring.write"; + + // Creates a new custom dashboard. + // + // This method requires the `monitoring.dashboards.create` permission + // on the specified project. For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + rpc CreateDashboard(CreateDashboardRequest) returns (Dashboard) { + option (google.api.http) = { + post: "/v1/{parent=projects/*}/dashboards" + body: "dashboard" + }; + } + + // Lists the existing dashboards. + // + // This method requires the `monitoring.dashboards.list` permission + // on the specified project. For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + rpc ListDashboards(ListDashboardsRequest) returns (ListDashboardsResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*}/dashboards" + }; + } + + // Fetches a specific dashboard. + // + // This method requires the `monitoring.dashboards.get` permission + // on the specified dashboard. For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + rpc GetDashboard(GetDashboardRequest) returns (Dashboard) { + option (google.api.http) = { + get: "/v1/{name=projects/*/dashboards/*}" + }; + } + + // Deletes an existing custom dashboard. + // + // This method requires the `monitoring.dashboards.delete` permission + // on the specified dashboard. For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + rpc DeleteDashboard(DeleteDashboardRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=projects/*/dashboards/*}" + }; + } + + // Replaces an existing custom dashboard with a new definition. + // + // This method requires the `monitoring.dashboards.update` permission + // on the specified dashboard. For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + rpc UpdateDashboard(UpdateDashboardRequest) returns (Dashboard) { + option (google.api.http) = { + patch: "/v1/{dashboard.name=projects/*/dashboards/*}" + body: "dashboard" + }; + } +} + +// The `CreateDashboard` request. +message CreateDashboardRequest { + // Required. The project on which to execute the request. The format is: + // + // projects/[PROJECT_ID_OR_NUMBER] + // + // The `[PROJECT_ID_OR_NUMBER]` must match the dashboard resource name. + string parent = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The initial dashboard specification. 
+ Dashboard dashboard = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// The `ListDashboards` request. +message ListDashboardsRequest { + // Required. The scope of the dashboards to list. The format is: + // + // projects/[PROJECT_ID_OR_NUMBER] + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; + + // A positive number that is the maximum number of results to return. + // If unspecified, a default of 1000 is used. + int32 page_size = 2; + + // If this field is not empty then it must contain the `nextPageToken` value + // returned by a previous call to this method. Using this field causes the + // method to return additional results from the previous method call. + string page_token = 3; +} + +// The `ListDashboards` request. +message ListDashboardsResponse { + // The list of requested dashboards. + repeated Dashboard dashboards = 1; + + // If there are more results than have been returned, then this field is set + // to a non-empty value. To see the additional results, + // use that value as `page_token` in the next call to this method. + string next_page_token = 2; +} + +// The `GetDashboard` request. +message GetDashboardRequest { + // Required. The resource name of the Dashboard. The format is one of: + // + // - `dashboards/[DASHBOARD_ID]` (for system dashboards) + // - `projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID]` + // (for custom dashboards). + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "monitoring.googleapis.com/Dashboard" + } + ]; +} + +// The `DeleteDashboard` request. +message DeleteDashboardRequest { + // Required. The resource name of the Dashboard. The format is: + // + // projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID] + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "monitoring.googleapis.com/Dashboard" + } + ]; +} + +// The `UpdateDashboard` request. +message UpdateDashboardRequest { + // Required. The dashboard that will replace the existing dashboard. + Dashboard dashboard = 1 [(google.api.field_behavior) = REQUIRED]; +} diff --git a/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2.py index 81e679e..b586889 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2.py +++ b/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,8 @@ # limitations under the License. # Generated by the protocol buffer compiler. DO NOT EDIT! 
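The ``DashboardsService`` definition above pairs each RPC with an HTTP binding and documents the IAM permission it requires. The sketch below exercises a few of those RPCs through the GAPIC wrapper shipped in this package; the ``DashboardsServiceClient`` class name and its snake_case method signatures are assumptions about that generated surface rather than something this proto file defines.

from google.cloud.monitoring_dashboard import v1
from google.cloud.monitoring_dashboard.v1.proto import dashboard_pb2, layouts_pb2

client = v1.DashboardsServiceClient()  # assumed GAPIC client class
parent = "projects/my-project"         # projects/[PROJECT_ID_OR_NUMBER]

# CreateDashboard: requires monitoring.dashboards.create on the project.
created = client.create_dashboard(
    parent,
    dashboard_pb2.Dashboard(
        display_name="API error rates",
        grid_layout=layouts_pb2.GridLayout(),
    ),
)

# GetDashboard: requires monitoring.dashboards.get on the dashboard.
fetched = client.get_dashboard(created.name)

# DeleteDashboard: requires monitoring.dashboards.delete on the dashboard.
client.delete_dashboard(created.name)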
-# source: google/cloud/monitoring/dashboard_v1/proto/dashboards_service.proto +# source: google/cloud/monitoring_dashboard_v1/proto/dashboards_service.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -32,8 +29,9 @@ from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 from google.cloud.monitoring_dashboard.v1.proto import ( - dashboard_pb2 as google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2, + dashboard_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2, ) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 @@ -41,19 +39,17 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring/dashboard_v1/proto/dashboards_service.proto", + name="google/cloud/monitoring_dashboard_v1/proto/dashboards_service.proto", package="google.monitoring.dashboard.v1", syntax="proto3", - serialized_options=_b( - '\n"com.google.monitoring.dashboard.v1B\026DashboardsServiceProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard' - ), - serialized_pb=_b( - '\nCgoogle/cloud/monitoring/dashboard_v1/proto/dashboards_service.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a:google/cloud/monitoring/dashboard_v1/proto/dashboard.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/api/client.proto"p\n\x16\x43reateDashboardRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\tdashboard\x18\x02 \x01(\x0b\x32).google.monitoring.dashboard.v1.DashboardB\x03\xe0\x41\x02"S\n\x15ListDashboardsRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"p\n\x16ListDashboardsResponse\x12=\n\ndashboards\x18\x01 \x03(\x0b\x32).google.monitoring.dashboard.v1.Dashboard\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"(\n\x13GetDashboardRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02"+\n\x16\x44\x65leteDashboardRequest\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02"[\n\x16UpdateDashboardRequest\x12\x41\n\tdashboard\x18\x01 
\x01(\x0b\x32).google.monitoring.dashboard.v1.DashboardB\x03\xe0\x41\x02\x32\xb1\x08\n\x11\x44\x61shboardsService\x12\xab\x01\n\x0f\x43reateDashboard\x12\x36.google.monitoring.dashboard.v1.CreateDashboardRequest\x1a).google.monitoring.dashboard.v1.Dashboard"5\x82\xd3\xe4\x93\x02/""/v1/{parent=projects/*}/dashboards:\tdashboard\x12\xab\x01\n\x0eListDashboards\x12\x35.google.monitoring.dashboard.v1.ListDashboardsRequest\x1a\x36.google.monitoring.dashboard.v1.ListDashboardsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{parent=projects/*}/dashboards\x12\x9a\x01\n\x0cGetDashboard\x12\x33.google.monitoring.dashboard.v1.GetDashboardRequest\x1a).google.monitoring.dashboard.v1.Dashboard"*\x82\xd3\xe4\x93\x02$\x12"/v1/{name=projects/*/dashboards/*}\x12\x8d\x01\n\x0f\x44\x65leteDashboard\x12\x36.google.monitoring.dashboard.v1.DeleteDashboardRequest\x1a\x16.google.protobuf.Empty"*\x82\xd3\xe4\x93\x02$*"/v1/{name=projects/*/dashboards/*}\x12\xb5\x01\n\x0fUpdateDashboard\x12\x36.google.monitoring.dashboard.v1.UpdateDashboardRequest\x1a).google.monitoring.dashboard.v1.Dashboard"?\x82\xd3\xe4\x93\x02\x39\x32,/v1/{dashboard.name=projects/*/dashboards/*}:\tdashboard\x1a\xda\x01\xca\x41\x19monitoring.googleapis.com\xd2\x41\xba\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.read,https://www.googleapis.com/auth/monitoring.writeB\x87\x01\n"com.google.monitoring.dashboard.v1B\x16\x44\x61shboardsServiceProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboardb\x06proto3' - ), + serialized_options=b'\n"com.google.monitoring.dashboard.v1B\026DashboardsServiceProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\nCgoogle/cloud/monitoring_dashboard_v1/proto/dashboards_service.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a:google/cloud/monitoring_dashboard_v1/proto/dashboard.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/api/client.proto"p\n\x16\x43reateDashboardRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\tdashboard\x18\x02 \x01(\x0b\x32).google.monitoring.dashboard.v1.DashboardB\x03\xe0\x41\x02"\x83\x01\n\x15ListDashboardsRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"p\n\x16ListDashboardsResponse\x12=\n\ndashboards\x18\x01 \x03(\x0b\x32).google.monitoring.dashboard.v1.Dashboard\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\x13GetDashboardRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\n#monitoring.googleapis.com/Dashboard"S\n\x16\x44\x65leteDashboardRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\n#monitoring.googleapis.com/Dashboard"[\n\x16UpdateDashboardRequest\x12\x41\n\tdashboard\x18\x01 
\x01(\x0b\x32).google.monitoring.dashboard.v1.DashboardB\x03\xe0\x41\x02\x32\xb1\x08\n\x11\x44\x61shboardsService\x12\xab\x01\n\x0f\x43reateDashboard\x12\x36.google.monitoring.dashboard.v1.CreateDashboardRequest\x1a).google.monitoring.dashboard.v1.Dashboard"5\x82\xd3\xe4\x93\x02/""/v1/{parent=projects/*}/dashboards:\tdashboard\x12\xab\x01\n\x0eListDashboards\x12\x35.google.monitoring.dashboard.v1.ListDashboardsRequest\x1a\x36.google.monitoring.dashboard.v1.ListDashboardsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{parent=projects/*}/dashboards\x12\x9a\x01\n\x0cGetDashboard\x12\x33.google.monitoring.dashboard.v1.GetDashboardRequest\x1a).google.monitoring.dashboard.v1.Dashboard"*\x82\xd3\xe4\x93\x02$\x12"/v1/{name=projects/*/dashboards/*}\x12\x8d\x01\n\x0f\x44\x65leteDashboard\x12\x36.google.monitoring.dashboard.v1.DeleteDashboardRequest\x1a\x16.google.protobuf.Empty"*\x82\xd3\xe4\x93\x02$*"/v1/{name=projects/*/dashboards/*}\x12\xb5\x01\n\x0fUpdateDashboard\x12\x36.google.monitoring.dashboard.v1.UpdateDashboardRequest\x1a).google.monitoring.dashboard.v1.Dashboard"?\x82\xd3\xe4\x93\x02\x39\x32,/v1/{dashboard.name=projects/*/dashboards/*}:\tdashboard\x1a\xda\x01\xca\x41\x19monitoring.googleapis.com\xd2\x41\xba\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.read,https://www.googleapis.com/auth/monitoring.writeB\xb2\x01\n"com.google.monitoring.dashboard.v1B\x16\x44\x61shboardsServiceProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', dependencies=[ google_dot_api_dot_annotations__pb2.DESCRIPTOR, google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2.DESCRIPTOR, + google_dot_api_dot_resource__pb2.DESCRIPTOR, + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, google_dot_api_dot_client__pb2.DESCRIPTOR, @@ -67,6 +63,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="parent", @@ -77,14 +74,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="dashboard", @@ -101,8 +99,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -113,8 +112,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=314, - serialized_end=426, + serialized_start=341, + serialized_end=453, ) @@ -124,6 +123,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="parent", @@ -134,14 +134,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - 
serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_size", @@ -160,6 +161,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="page_token", @@ -170,7 +172,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -178,6 +180,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -188,8 +191,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=428, - serialized_end=511, + serialized_start=456, + serialized_end=587, ) @@ -199,6 +202,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="dashboards", @@ -217,6 +221,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="next_page_token", @@ -227,7 +232,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -235,6 +240,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -245,8 +251,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=513, - serialized_end=625, + serialized_start=589, + serialized_end=701, ) @@ -256,6 +262,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -266,15 +273,16 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002\372A%\n#monitoring.googleapis.com/Dashboard", file=DESCRIPTOR, - ) + create_key=_descriptor._internal_create_key, + ), ], extensions=[], nested_types=[], @@ -284,8 +292,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=627, - serialized_end=667, + serialized_start=703, + serialized_end=783, ) @@ -295,6 +303,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="name", @@ -305,15 +314,16 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002\372A%\n#monitoring.googleapis.com/Dashboard", file=DESCRIPTOR, - ) + create_key=_descriptor._internal_create_key, + ), ], extensions=[], nested_types=[], @@ -323,8 +333,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=669, - serialized_end=712, + serialized_start=785, + serialized_end=868, ) @@ -334,6 +344,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ 
_descriptor.FieldDescriptor( name="dashboard", @@ -350,9 +361,10 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, - ) + create_key=_descriptor._internal_create_key, + ), ], extensions=[], nested_types=[], @@ -362,24 +374,24 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=714, - serialized_end=805, + serialized_start=870, + serialized_end=961, ) _CREATEDASHBOARDREQUEST.fields_by_name[ "dashboard" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD ) _LISTDASHBOARDSRESPONSE.fields_by_name[ "dashboards" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD ) _UPDATEDASHBOARDREQUEST.fields_by_name[ "dashboard" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD ) DESCRIPTOR.message_types_by_name["CreateDashboardRequest"] = _CREATEDASHBOARDREQUEST DESCRIPTOR.message_types_by_name["ListDashboardsRequest"] = _LISTDASHBOARDSREQUEST @@ -394,15 +406,15 @@ (_message.Message,), { "DESCRIPTOR": _CREATEDASHBOARDREQUEST, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.dashboards_service_pb2", + "__module__": "google.cloud.monitoring_dashboard.v1.proto.dashboards_service_pb2", "__doc__": """The ``CreateDashboard`` request. - Attributes: parent: Required. The project on which to execute the request. The - format is ``"projects/{project_id_or_number}"``. The - {project_id_or_number} must match the dashboard resource name. + format is: :: projects/[PROJECT_ID_OR_NUMBER] The + ``[PROJECT_ID_OR_NUMBER]`` must match the dashboard resource + name. dashboard: Required. The initial dashboard specification. """, @@ -416,15 +428,13 @@ (_message.Message,), { "DESCRIPTOR": _LISTDASHBOARDSREQUEST, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.dashboards_service_pb2", + "__module__": "google.cloud.monitoring_dashboard.v1.proto.dashboards_service_pb2", "__doc__": """The ``ListDashboards`` request. - Attributes: parent: - Required. The scope of the dashboards to list. A project scope - must be specified in the form of - ``"projects/{project_id_or_number}"``. + Required. The scope of the dashboards to list. The format is: + :: projects/[PROJECT_ID_OR_NUMBER] page_size: A positive number that is the maximum number of results to return. If unspecified, a default of 1000 is used. @@ -444,17 +454,16 @@ (_message.Message,), { "DESCRIPTOR": _LISTDASHBOARDSRESPONSE, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.dashboards_service_pb2", + "__module__": "google.cloud.monitoring_dashboard.v1.proto.dashboards_service_pb2", "__doc__": """The ``ListDashboards`` request. - Attributes: dashboards: The list of requested dashboards. next_page_token: If there are more results than have been returned, then this field is set to a non-empty value. To see the additional - results, use that value as ``pageToken`` in the next call to + results, use that value as ``page_token`` in the next call to this method. 
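The ``page_token`` / ``next_page_token`` handshake described above can be driven directly against the generated gRPC stub. In the sketch below, the ``DashboardsServiceStub`` name follows standard gRPC codegen for this service and the ``google.auth`` channel helper is used only to keep the example self-contained; neither is defined by this patch.

import google.auth
import google.auth.transport.grpc
import google.auth.transport.requests

from google.cloud.monitoring_dashboard.v1.proto import (
    dashboards_service_pb2,
    dashboards_service_pb2_grpc,
)

credentials, _ = google.auth.default(
    scopes=["https://www.googleapis.com/auth/monitoring.read"]
)
channel = google.auth.transport.grpc.secure_authorized_channel(
    credentials,
    google.auth.transport.requests.Request(),
    "monitoring.googleapis.com:443",
)
stub = dashboards_service_pb2_grpc.DashboardsServiceStub(channel)

page_token = ""
while True:
    response = stub.ListDashboards(
        dashboards_service_pb2.ListDashboardsRequest(
            parent="projects/my-project",  # projects/[PROJECT_ID_OR_NUMBER]
            page_size=100,
            page_token=page_token,
        )
    )
    for dashboard in response.dashboards:
        print(dashboard.name)
    # An empty next_page_token means there are no more results.
    page_token = response.next_page_token
    if not page_token:
        break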
""", # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.ListDashboardsResponse) @@ -467,16 +476,16 @@ (_message.Message,), { "DESCRIPTOR": _GETDASHBOARDREQUEST, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.dashboards_service_pb2", + "__module__": "google.cloud.monitoring_dashboard.v1.proto.dashboards_service_pb2", "__doc__": """The ``GetDashboard`` request. - Attributes: name: Required. The resource name of the Dashboard. The format is - one of ``"dashboards/{dashboard_id}"`` (for system dashboards) - or ``"projects/{project_id_or_number}/dashboards/{dashboard_id - }"`` (for custom dashboards). + one of: - ``dashboards/[DASHBOARD_ID]`` (for system + dashboards) - + ``projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID]`` + (for custom dashboards). """, # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.GetDashboardRequest) }, @@ -488,14 +497,14 @@ (_message.Message,), { "DESCRIPTOR": _DELETEDASHBOARDREQUEST, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.dashboards_service_pb2", + "__module__": "google.cloud.monitoring_dashboard.v1.proto.dashboards_service_pb2", "__doc__": """The ``DeleteDashboard`` request. - Attributes: name: - Required. The resource name of the Dashboard. The format is - ``"projects/{project_id_or_number}/dashboards/{dashboard_id}"``. + Required. The resource name of the Dashboard. The format is: + :: + projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID] """, # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.DeleteDashboardRequest) }, @@ -507,10 +516,9 @@ (_message.Message,), { "DESCRIPTOR": _UPDATEDASHBOARDREQUEST, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.dashboards_service_pb2", + "__module__": "google.cloud.monitoring_dashboard.v1.proto.dashboards_service_pb2", "__doc__": """The ``UpdateDashboard`` request. - Attributes: dashboard: Required. 
The dashboard that will replace the existing @@ -535,11 +543,10 @@ full_name="google.monitoring.dashboard.v1.DashboardsService", file=DESCRIPTOR, index=0, - serialized_options=_b( - "\312A\031monitoring.googleapis.com\322A\272\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.read,https://www.googleapis.com/auth/monitoring.write" - ), - serialized_start=808, - serialized_end=1881, + serialized_options=b"\312A\031monitoring.googleapis.com\322A\272\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.read,https://www.googleapis.com/auth/monitoring.write", + create_key=_descriptor._internal_create_key, + serialized_start=964, + serialized_end=2037, methods=[ _descriptor.MethodDescriptor( name="CreateDashboard", @@ -547,10 +554,9 @@ index=0, containing_service=None, input_type=_CREATEDASHBOARDREQUEST, - output_type=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD, - serialized_options=_b( - '\202\323\344\223\002/""/v1/{parent=projects/*}/dashboards:\tdashboard' - ), + output_type=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD, + serialized_options=b'\202\323\344\223\002/""/v1/{parent=projects/*}/dashboards:\tdashboard', + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="ListDashboards", @@ -559,9 +565,8 @@ containing_service=None, input_type=_LISTDASHBOARDSREQUEST, output_type=_LISTDASHBOARDSRESPONSE, - serialized_options=_b( - '\202\323\344\223\002$\022"/v1/{parent=projects/*}/dashboards' - ), + serialized_options=b'\202\323\344\223\002$\022"/v1/{parent=projects/*}/dashboards', + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="GetDashboard", @@ -569,10 +574,9 @@ index=2, containing_service=None, input_type=_GETDASHBOARDREQUEST, - output_type=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD, - serialized_options=_b( - '\202\323\344\223\002$\022"/v1/{name=projects/*/dashboards/*}' - ), + output_type=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD, + serialized_options=b'\202\323\344\223\002$\022"/v1/{name=projects/*/dashboards/*}', + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="DeleteDashboard", @@ -581,9 +585,8 @@ containing_service=None, input_type=_DELETEDASHBOARDREQUEST, output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=_b( - '\202\323\344\223\002$*"/v1/{name=projects/*/dashboards/*}' - ), + serialized_options=b'\202\323\344\223\002$*"/v1/{name=projects/*/dashboards/*}', + create_key=_descriptor._internal_create_key, ), _descriptor.MethodDescriptor( name="UpdateDashboard", @@ -591,10 +594,9 @@ index=4, containing_service=None, input_type=_UPDATEDASHBOARDREQUEST, - output_type=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD, - serialized_options=_b( - "\202\323\344\223\00292,/v1/{dashboard.name=projects/*/dashboards/*}:\tdashboard" - ), + output_type=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD, + serialized_options=b"\202\323\344\223\00292,/v1/{dashboard.name=projects/*/dashboards/*}:\tdashboard", + create_key=_descriptor._internal_create_key, ), ], ) diff --git a/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2_grpc.py 
b/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2_grpc.py index 95ea78c..6034a8c 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2_grpc.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -17,10 +17,10 @@ import grpc from google.cloud.monitoring_dashboard.v1.proto import ( - dashboard_pb2 as google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2, + dashboard_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2, ) from google.cloud.monitoring_dashboard.v1.proto import ( - dashboards_service_pb2 as google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboards__service__pb2, + dashboards_service_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2, ) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 @@ -38,28 +38,28 @@ def __init__(self, channel): """ self.CreateDashboard = channel.unary_unary( "/google.monitoring.dashboard.v1.DashboardsService/CreateDashboard", - request_serializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboards__service__pb2.CreateDashboardRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.FromString, + request_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.CreateDashboardRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.FromString, ) self.ListDashboards = channel.unary_unary( "/google.monitoring.dashboard.v1.DashboardsService/ListDashboards", - request_serializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboards__service__pb2.ListDashboardsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboards__service__pb2.ListDashboardsResponse.FromString, + request_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.ListDashboardsRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.ListDashboardsResponse.FromString, ) self.GetDashboard = channel.unary_unary( "/google.monitoring.dashboard.v1.DashboardsService/GetDashboard", - request_serializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboards__service__pb2.GetDashboardRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.FromString, + request_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.GetDashboardRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.FromString, ) self.DeleteDashboard = channel.unary_unary( "/google.monitoring.dashboard.v1.DashboardsService/DeleteDashboard", - request_serializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboards__service__pb2.DeleteDashboardRequest.SerializeToString, + 
request_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.DeleteDashboardRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, ) self.UpdateDashboard = channel.unary_unary( "/google.monitoring.dashboard.v1.DashboardsService/UpdateDashboard", - request_serializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboards__service__pb2.UpdateDashboardRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.FromString, + request_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.UpdateDashboardRequest.SerializeToString, + response_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.FromString, ) @@ -128,28 +128,28 @@ def add_DashboardsServiceServicer_to_server(servicer, server): rpc_method_handlers = { "CreateDashboard": grpc.unary_unary_rpc_method_handler( servicer.CreateDashboard, - request_deserializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboards__service__pb2.CreateDashboardRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.SerializeToString, + request_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.CreateDashboardRequest.FromString, + response_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.SerializeToString, ), "ListDashboards": grpc.unary_unary_rpc_method_handler( servicer.ListDashboards, - request_deserializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboards__service__pb2.ListDashboardsRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboards__service__pb2.ListDashboardsResponse.SerializeToString, + request_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.ListDashboardsRequest.FromString, + response_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.ListDashboardsResponse.SerializeToString, ), "GetDashboard": grpc.unary_unary_rpc_method_handler( servicer.GetDashboard, - request_deserializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboards__service__pb2.GetDashboardRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.SerializeToString, + request_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.GetDashboardRequest.FromString, + response_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.SerializeToString, ), "DeleteDashboard": grpc.unary_unary_rpc_method_handler( servicer.DeleteDashboard, - request_deserializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboards__service__pb2.DeleteDashboardRequest.FromString, + request_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.DeleteDashboardRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), "UpdateDashboard": grpc.unary_unary_rpc_method_handler( servicer.UpdateDashboard, - 
request_deserializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboards__service__pb2.UpdateDashboardRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.SerializeToString, + request_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.UpdateDashboardRequest.FromString, + response_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( diff --git a/google/cloud/monitoring_dashboard/v1/proto/drilldowns.proto b/google/cloud/monitoring_dashboard/v1/proto/drilldowns.proto new file mode 100644 index 0000000..0080df5 --- /dev/null +++ b/google/cloud/monitoring_dashboard/v1/proto/drilldowns.proto @@ -0,0 +1,25 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/monitoring/dashboard/v1/common.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "DrilldownsProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; diff --git a/google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2.py index 7455803..db83727 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2.py +++ b/google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,8 @@ # limitations under the License. # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring/dashboard_v1/proto/drilldowns.proto +# source: google/cloud/monitoring_dashboard_v1/proto/drilldowns.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -30,16 +27,21 @@ _sym_db = _symbol_database.Default() +from google.cloud.monitoring_dashboard.v1.proto import ( + common_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2, +) + + DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring/dashboard_v1/proto/drilldowns.proto", + name="google/cloud/monitoring_dashboard_v1/proto/drilldowns.proto", package="google.monitoring.dashboard.v1", syntax="proto3", - serialized_options=_b( - '\n"com.google.monitoring.dashboard.v1B\017DrilldownsProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard' - ), - serialized_pb=_b( - '\n;google/cloud/monitoring/dashboard_v1/proto/drilldowns.proto\x12\x1egoogle.monitoring.dashboard.v1B\x80\x01\n"com.google.monitoring.dashboard.v1B\x0f\x44rilldownsProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboardb\x06proto3' - ), + serialized_options=b'\n"com.google.monitoring.dashboard.v1B\017DrilldownsProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n;google/cloud/monitoring_dashboard_v1/proto/drilldowns.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x37google/cloud/monitoring_dashboard_v1/proto/common.protoB\xab\x01\n"com.google.monitoring.dashboard.v1B\x0f\x44rilldownsProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', + dependencies=[ + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2.DESCRIPTOR, + ], ) diff --git a/google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2_grpc.py index 7343170..b662812 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2_grpc.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/monitoring_dashboard/v1/proto/layouts.proto b/google/cloud/monitoring_dashboard/v1/proto/layouts.proto new file mode 100644 index 0000000..acc0517 --- /dev/null +++ b/google/cloud/monitoring_dashboard/v1/proto/layouts.proto @@ -0,0 +1,74 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/monitoring/dashboard/v1/widget.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "LayoutsProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// A basic layout divides the available space into vertical columns of equal +// width and arranges a list of widgets using a row-first strategy. +message GridLayout { + // The number of columns into which the view's width is divided. If omitted + // or set to zero, a system default will be used while rendering. + int64 columns = 1; + + // The informational elements that are arranged into the columns row-first. + repeated Widget widgets = 2; +} + +// A simplified layout that divides the available space into rows +// and arranges a set of widgets horizontally in each row. +message RowLayout { + // Defines the layout properties and content for a row. + message Row { + // The relative weight of this row. The row weight is used to adjust the + // height of rows on the screen (relative to peers). Greater the weight, + // greater the height of the row on the screen. If omitted, a value + // of 1 is used while rendering. + int64 weight = 1; + + // The display widgets arranged horizontally in this row. + repeated Widget widgets = 2; + } + + // The rows of content to display. + repeated Row rows = 1; +} + +// A simplified layout that divides the available space into vertical columns +// and arranges a set of widgets vertically in each column. +message ColumnLayout { + // Defines the layout properties and content for a column. + message Column { + // The relative weight of this column. The column weight is used to adjust + // the width of columns on the screen (relative to peers). + // Greater the weight, greater the width of the column on the screen. + // If omitted, a value of 1 is used while rendering. + int64 weight = 1; + + // The display widgets arranged vertically in this column. + repeated Widget widgets = 2; + } + + // The columns of content to display. + repeated Column columns = 1; +} diff --git a/google/cloud/monitoring_dashboard/v1/proto/layouts_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/layouts_pb2.py index e25436b..1bedef1 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/layouts_pb2.py +++ b/google/cloud/monitoring_dashboard/v1/proto/layouts_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,8 @@ # limitations under the License. # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring/dashboard_v1/proto/layouts.proto +# source: google/cloud/monitoring_dashboard_v1/proto/layouts.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -31,22 +28,19 @@ from google.cloud.monitoring_dashboard.v1.proto import ( - widget_pb2 as google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_widget__pb2, + widget_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_widget__pb2, ) DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring/dashboard_v1/proto/layouts.proto", + name="google/cloud/monitoring_dashboard_v1/proto/layouts.proto", package="google.monitoring.dashboard.v1", syntax="proto3", - serialized_options=_b( - '\n"com.google.monitoring.dashboard.v1B\014LayoutsProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard' - ), - serialized_pb=_b( - '\n8google/cloud/monitoring/dashboard_v1/proto/layouts.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x37google/cloud/monitoring/dashboard_v1/proto/widget.proto"V\n\nGridLayout\x12\x0f\n\x07\x63olumns\x18\x01 \x01(\x03\x12\x37\n\x07widgets\x18\x02 \x03(\x0b\x32&.google.monitoring.dashboard.v1.Widget"\x98\x01\n\tRowLayout\x12;\n\x04rows\x18\x01 \x03(\x0b\x32-.google.monitoring.dashboard.v1.RowLayout.Row\x1aN\n\x03Row\x12\x0e\n\x06weight\x18\x01 \x01(\x03\x12\x37\n\x07widgets\x18\x02 \x03(\x0b\x32&.google.monitoring.dashboard.v1.Widget"\xa7\x01\n\x0c\x43olumnLayout\x12\x44\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x33.google.monitoring.dashboard.v1.ColumnLayout.Column\x1aQ\n\x06\x43olumn\x12\x0e\n\x06weight\x18\x01 \x01(\x03\x12\x37\n\x07widgets\x18\x02 \x03(\x0b\x32&.google.monitoring.dashboard.v1.WidgetB}\n"com.google.monitoring.dashboard.v1B\x0cLayoutsProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboardb\x06proto3' - ), + serialized_options=b'\n"com.google.monitoring.dashboard.v1B\014LayoutsProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n8google/cloud/monitoring_dashboard_v1/proto/layouts.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x37google/cloud/monitoring_dashboard_v1/proto/widget.proto"V\n\nGridLayout\x12\x0f\n\x07\x63olumns\x18\x01 \x01(\x03\x12\x37\n\x07widgets\x18\x02 \x03(\x0b\x32&.google.monitoring.dashboard.v1.Widget"\x98\x01\n\tRowLayout\x12;\n\x04rows\x18\x01 \x03(\x0b\x32-.google.monitoring.dashboard.v1.RowLayout.Row\x1aN\n\x03Row\x12\x0e\n\x06weight\x18\x01 \x01(\x03\x12\x37\n\x07widgets\x18\x02 \x03(\x0b\x32&.google.monitoring.dashboard.v1.Widget"\xa7\x01\n\x0c\x43olumnLayout\x12\x44\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x33.google.monitoring.dashboard.v1.ColumnLayout.Column\x1aQ\n\x06\x43olumn\x12\x0e\n\x06weight\x18\x01 \x01(\x03\x12\x37\n\x07widgets\x18\x02 \x03(\x0b\x32&.google.monitoring.dashboard.v1.WidgetB\xa8\x01\n"com.google.monitoring.dashboard.v1B\x0cLayoutsProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', dependencies=[ - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_widget__pb2.DESCRIPTOR + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_widget__pb2.DESCRIPTOR, ], ) @@ -57,6 +51,7 @@ filename=None, 
file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="columns", @@ -75,6 +70,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="widgets", @@ -93,6 +89,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -114,6 +111,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="weight", @@ -132,6 +130,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="widgets", @@ -150,6 +149,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -170,6 +170,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="rows", @@ -188,10 +189,11 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + create_key=_descriptor._internal_create_key, + ), ], extensions=[], - nested_types=[_ROWLAYOUT_ROW], + nested_types=[_ROWLAYOUT_ROW,], enum_types=[], serialized_options=None, is_extendable=False, @@ -209,6 +211,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="weight", @@ -227,6 +230,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="widgets", @@ -245,6 +249,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -265,6 +270,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="columns", @@ -283,10 +289,11 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + create_key=_descriptor._internal_create_key, + ), ], extensions=[], - nested_types=[_COLUMNLAYOUT_COLUMN], + nested_types=[_COLUMNLAYOUT_COLUMN,], enum_types=[], serialized_options=None, is_extendable=False, @@ -300,19 +307,19 @@ _GRIDLAYOUT.fields_by_name[ "widgets" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_widget__pb2._WIDGET + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_widget__pb2._WIDGET ) _ROWLAYOUT_ROW.fields_by_name[ "widgets" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_widget__pb2._WIDGET + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_widget__pb2._WIDGET ) _ROWLAYOUT_ROW.containing_type = _ROWLAYOUT _ROWLAYOUT.fields_by_name["rows"].message_type = _ROWLAYOUT_ROW _COLUMNLAYOUT_COLUMN.fields_by_name[ "widgets" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_widget__pb2._WIDGET + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_widget__pb2._WIDGET ) _COLUMNLAYOUT_COLUMN.containing_type = _COLUMNLAYOUT _COLUMNLAYOUT.fields_by_name["columns"].message_type = _COLUMNLAYOUT_COLUMN @@ -326,11 +333,9 @@ (_message.Message,), { "DESCRIPTOR": _GRIDLAYOUT, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.layouts_pb2", - "__doc__": """A basic layout divides the 
available space into vertical - columns of equal width and arranges a list of widgets using a row-first - strategy. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.layouts_pb2", + "__doc__": """A basic layout divides the available space into vertical columns of + equal width and arranges a list of widgets using a row-first strategy. Attributes: columns: @@ -355,10 +360,9 @@ (_message.Message,), { "DESCRIPTOR": _ROWLAYOUT_ROW, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.layouts_pb2", + "__module__": "google.cloud.monitoring_dashboard.v1.proto.layouts_pb2", "__doc__": """Defines the layout properties and content for a row. - Attributes: weight: The relative weight of this row. The row weight is used to @@ -372,10 +376,9 @@ }, ), "DESCRIPTOR": _ROWLAYOUT, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.layouts_pb2", - "__doc__": """A simplified layout that divides the available space into - rows and arranges a set of widgets horizontally in each row. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.layouts_pb2", + "__doc__": """A simplified layout that divides the available space into rows and + arranges a set of widgets horizontally in each row. Attributes: rows: @@ -396,10 +399,9 @@ (_message.Message,), { "DESCRIPTOR": _COLUMNLAYOUT_COLUMN, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.layouts_pb2", + "__module__": "google.cloud.monitoring_dashboard.v1.proto.layouts_pb2", "__doc__": """Defines the layout properties and content for a column. - Attributes: weight: The relative weight of this column. The column weight is used @@ -413,11 +415,9 @@ }, ), "DESCRIPTOR": _COLUMNLAYOUT, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.layouts_pb2", - "__doc__": """A simplified layout that divides the available space into - vertical columns and arranges a set of widgets vertically in each - column. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.layouts_pb2", + "__doc__": """A simplified layout that divides the available space into vertical + columns and arranges a set of widgets vertically in each column. Attributes: columns: diff --git a/google/cloud/monitoring_dashboard/v1/proto/layouts_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/layouts_pb2_grpc.py index 7343170..b662812 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/layouts_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard/v1/proto/layouts_pb2_grpc.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/monitoring_dashboard/v1/proto/metrics.proto b/google/cloud/monitoring_dashboard/v1/proto/metrics.proto new file mode 100644 index 0000000..2fff1d2 --- /dev/null +++ b/google/cloud/monitoring_dashboard/v1/proto/metrics.proto @@ -0,0 +1,174 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/field_behavior.proto"; +import "google/monitoring/dashboard/v1/common.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "MetricsProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// TimeSeriesQuery collects the set of supported methods for querying time +// series data from the Stackdriver metrics API. +message TimeSeriesQuery { + // Parameters needed to obtain data for the chart. + oneof source { + // Filter parameters to fetch time series. + TimeSeriesFilter time_series_filter = 1; + + // Parameters to fetch a ratio between two time series filters. + TimeSeriesFilterRatio time_series_filter_ratio = 2; + + // A query used to fetch time series. + string time_series_query_language = 3; + } + + // The unit of data contained in fetched time series. If non-empty, this + // unit will override any unit that accompanies fetched data. The format is + // the same as the + // [`unit`](https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.metricDescriptors) + // field in `MetricDescriptor`. + string unit_override = 5; +} + +// A filter that defines a subset of time series data that is displayed in a +// widget. Time series data is fetched using the +// [`ListTimeSeries`](https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list) +// method. +message TimeSeriesFilter { + // Required. The [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters) + // that identifies the metric types, resources, and projects to query. + string filter = 1 [(google.api.field_behavior) = REQUIRED]; + + // By default, the raw time series data is returned. + // Use this field to combine multiple time series for different views of the + // data. + Aggregation aggregation = 2; + + // Apply a second aggregation after `aggregation` is applied. + Aggregation secondary_aggregation = 3; + + // Selects an optional time series filter. + oneof output_filter { + // Ranking based time series filter. + PickTimeSeriesFilter pick_time_series_filter = 4; + + // Statistics based time series filter. + // Note: This field is deprecated and completely ignored by the API. + StatisticalTimeSeriesFilter statistical_time_series_filter = 5 [deprecated = true]; + } +} + +// A pair of time series filters that define a ratio computation. The output +// time series is the pair-wise division of each aligned element from the +// numerator and denominator time series. +message TimeSeriesFilterRatio { + // Describes a query to build the numerator or denominator of a + // TimeSeriesFilterRatio. + message RatioPart { + // Required. The [monitoring + // filter](https://cloud.google.com/monitoring/api/v3/filters) that + // identifies the metric types, resources, and projects to query. + string filter = 1 [(google.api.field_behavior) = REQUIRED]; + + // By default, the raw time series data is returned. + // Use this field to combine multiple time series for different views of the + // data. + Aggregation aggregation = 2; + } + + // The numerator of the ratio. + RatioPart numerator = 1; + + // The denominator of the ratio. + RatioPart denominator = 2; + + // Apply a second aggregation after the ratio is computed. 
+ Aggregation secondary_aggregation = 3; + + // Selects an optional filter that is applied to the time series after + // computing the ratio. + oneof output_filter { + // Ranking based time series filter. + PickTimeSeriesFilter pick_time_series_filter = 4; + + // Statistics based time series filter. + // Note: This field is deprecated and completely ignored by the API. + StatisticalTimeSeriesFilter statistical_time_series_filter = 5 [deprecated = true]; + } +} + +// Defines a threshold for categorizing time series values. +message Threshold { + // The color suggests an interpretation to the viewer when actual values cross + // the threshold. Comments on each color provide UX guidance on how users can + // be expected to interpret a given state color. + enum Color { + // Color is unspecified. Not allowed in well-formed requests. + COLOR_UNSPECIFIED = 0; + + // Crossing the threshold is "concerning" behavior. + YELLOW = 4; + + // Crossing the threshold is "emergency" behavior. + RED = 6; + } + + // Whether the threshold is considered crossed by an actual value above or + // below its threshold value. + enum Direction { + // Not allowed in well-formed requests. + DIRECTION_UNSPECIFIED = 0; + + // The threshold will be considered crossed if the actual value is above + // the threshold value. + ABOVE = 1; + + // The threshold will be considered crossed if the actual value is below + // the threshold value. + BELOW = 2; + } + + // A label for the threshold. + string label = 1; + + // The value of the threshold. The value should be defined in the native scale + // of the metric. + double value = 2; + + // The state color for this threshold. Color is not allowed in a XyChart. + Color color = 3; + + // The direction for the current threshold. Direction is not allowed in a + // XyChart. + Direction direction = 4; +} + +// Defines the possible types of spark chart supported by the `Scorecard`. +enum SparkChartType { + // Not allowed in well-formed requests. + SPARK_CHART_TYPE_UNSPECIFIED = 0; + + // The sparkline will be rendered as a small line chart. + SPARK_LINE = 1; + + // The sparkbar will be rendered as a small bar chart. + SPARK_BAR = 2; +} diff --git a/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2.py index 96fbeec..8e6a8c4 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2.py +++ b/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,8 @@ # limitations under the License. # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring/dashboard_v1/proto/metrics.proto +# source: google/cloud/monitoring_dashboard_v1/proto/metrics.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message @@ -33,23 +30,20 @@ from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.monitoring_dashboard.v1.proto import ( - common_pb2 as google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_common__pb2, + common_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2, ) DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring/dashboard_v1/proto/metrics.proto", + name="google/cloud/monitoring_dashboard_v1/proto/metrics.proto", package="google.monitoring.dashboard.v1", syntax="proto3", - serialized_options=_b( - '\n"com.google.monitoring.dashboard.v1B\014MetricsProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard' - ), - serialized_pb=_b( - '\n8google/cloud/monitoring/dashboard_v1/proto/metrics.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x37google/cloud/monitoring/dashboard_v1/proto/common.proto"\xdd\x01\n\x0fTimeSeriesQuery\x12N\n\x12time_series_filter\x18\x01 \x01(\x0b\x32\x30.google.monitoring.dashboard.v1.TimeSeriesFilterH\x00\x12Y\n\x18time_series_filter_ratio\x18\x02 \x01(\x0b\x32\x35.google.monitoring.dashboard.v1.TimeSeriesFilterRatioH\x00\x12\x15\n\runit_override\x18\x05 \x01(\tB\x08\n\x06source"\xba\x02\n\x10TimeSeriesFilter\x12\x13\n\x06\x66ilter\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12@\n\x0b\x61ggregation\x18\x02 \x01(\x0b\x32+.google.monitoring.dashboard.v1.Aggregation\x12W\n\x17pick_time_series_filter\x18\x04 \x01(\x0b\x32\x34.google.monitoring.dashboard.v1.PickTimeSeriesFilterH\x00\x12\x65\n\x1estatistical_time_series_filter\x18\x05 \x01(\x0b\x32;.google.monitoring.dashboard.v1.StatisticalTimeSeriesFilterH\x00\x42\x0f\n\routput_filter"\xc2\x04\n\x15TimeSeriesFilterRatio\x12R\n\tnumerator\x18\x01 \x01(\x0b\x32?.google.monitoring.dashboard.v1.TimeSeriesFilterRatio.RatioPart\x12T\n\x0b\x64\x65nominator\x18\x02 \x01(\x0b\x32?.google.monitoring.dashboard.v1.TimeSeriesFilterRatio.RatioPart\x12J\n\x15secondary_aggregation\x18\x03 \x01(\x0b\x32+.google.monitoring.dashboard.v1.Aggregation\x12W\n\x17pick_time_series_filter\x18\x04 \x01(\x0b\x32\x34.google.monitoring.dashboard.v1.PickTimeSeriesFilterH\x00\x12\x65\n\x1estatistical_time_series_filter\x18\x05 \x01(\x0b\x32;.google.monitoring.dashboard.v1.StatisticalTimeSeriesFilterH\x00\x1a\x62\n\tRatioPart\x12\x13\n\x06\x66ilter\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12@\n\x0b\x61ggregation\x18\x02 \x01(\x0b\x32+.google.monitoring.dashboard.v1.AggregationB\x0f\n\routput_filter"\xa4\x02\n\tThreshold\x12\r\n\x05label\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01\x12>\n\x05\x63olor\x18\x03 \x01(\x0e\x32/.google.monitoring.dashboard.v1.Threshold.Color\x12\x46\n\tdirection\x18\x04 \x01(\x0e\x32\x33.google.monitoring.dashboard.v1.Threshold.Direction"3\n\x05\x43olor\x12\x15\n\x11\x43OLOR_UNSPECIFIED\x10\x00\x12\n\n\x06YELLOW\x10\x04\x12\x07\n\x03RED\x10\x06"<\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x42OVE\x10\x01\x12\t\n\x05\x42\x45LOW\x10\x02*Q\n\x0eSparkChartType\x12 
\n\x1cSPARK_CHART_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nSPARK_LINE\x10\x01\x12\r\n\tSPARK_BAR\x10\x02\x42}\n"com.google.monitoring.dashboard.v1B\x0cMetricsProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboardb\x06proto3' - ), + serialized_options=b'\n"com.google.monitoring.dashboard.v1B\014MetricsProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n8google/cloud/monitoring_dashboard_v1/proto/metrics.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x37google/cloud/monitoring_dashboard_v1/proto/common.proto"\x83\x02\n\x0fTimeSeriesQuery\x12N\n\x12time_series_filter\x18\x01 \x01(\x0b\x32\x30.google.monitoring.dashboard.v1.TimeSeriesFilterH\x00\x12Y\n\x18time_series_filter_ratio\x18\x02 \x01(\x0b\x32\x35.google.monitoring.dashboard.v1.TimeSeriesFilterRatioH\x00\x12$\n\x1atime_series_query_language\x18\x03 \x01(\tH\x00\x12\x15\n\runit_override\x18\x05 \x01(\tB\x08\n\x06source"\x8a\x03\n\x10TimeSeriesFilter\x12\x13\n\x06\x66ilter\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12@\n\x0b\x61ggregation\x18\x02 \x01(\x0b\x32+.google.monitoring.dashboard.v1.Aggregation\x12J\n\x15secondary_aggregation\x18\x03 \x01(\x0b\x32+.google.monitoring.dashboard.v1.Aggregation\x12W\n\x17pick_time_series_filter\x18\x04 \x01(\x0b\x32\x34.google.monitoring.dashboard.v1.PickTimeSeriesFilterH\x00\x12i\n\x1estatistical_time_series_filter\x18\x05 \x01(\x0b\x32;.google.monitoring.dashboard.v1.StatisticalTimeSeriesFilterB\x02\x18\x01H\x00\x42\x0f\n\routput_filter"\xc6\x04\n\x15TimeSeriesFilterRatio\x12R\n\tnumerator\x18\x01 \x01(\x0b\x32?.google.monitoring.dashboard.v1.TimeSeriesFilterRatio.RatioPart\x12T\n\x0b\x64\x65nominator\x18\x02 \x01(\x0b\x32?.google.monitoring.dashboard.v1.TimeSeriesFilterRatio.RatioPart\x12J\n\x15secondary_aggregation\x18\x03 \x01(\x0b\x32+.google.monitoring.dashboard.v1.Aggregation\x12W\n\x17pick_time_series_filter\x18\x04 \x01(\x0b\x32\x34.google.monitoring.dashboard.v1.PickTimeSeriesFilterH\x00\x12i\n\x1estatistical_time_series_filter\x18\x05 \x01(\x0b\x32;.google.monitoring.dashboard.v1.StatisticalTimeSeriesFilterB\x02\x18\x01H\x00\x1a\x62\n\tRatioPart\x12\x13\n\x06\x66ilter\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12@\n\x0b\x61ggregation\x18\x02 \x01(\x0b\x32+.google.monitoring.dashboard.v1.AggregationB\x0f\n\routput_filter"\xa4\x02\n\tThreshold\x12\r\n\x05label\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01\x12>\n\x05\x63olor\x18\x03 \x01(\x0e\x32/.google.monitoring.dashboard.v1.Threshold.Color\x12\x46\n\tdirection\x18\x04 \x01(\x0e\x32\x33.google.monitoring.dashboard.v1.Threshold.Direction"3\n\x05\x43olor\x12\x15\n\x11\x43OLOR_UNSPECIFIED\x10\x00\x12\n\n\x06YELLOW\x10\x04\x12\x07\n\x03RED\x10\x06"<\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x42OVE\x10\x01\x12\t\n\x05\x42\x45LOW\x10\x02*Q\n\x0eSparkChartType\x12 \n\x1cSPARK_CHART_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nSPARK_LINE\x10\x01\x12\r\n\tSPARK_BAR\x10\x02\x42\xa8\x01\n"com.google.monitoring.dashboard.v1B\x0cMetricsProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', dependencies=[ google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_common__pb2.DESCRIPTOR, + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2.DESCRIPTOR, ], ) @@ 
-58,6 +52,7 @@ full_name="google.monitoring.dashboard.v1.SparkChartType", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="SPARK_CHART_TYPE_UNSPECIFIED", @@ -65,18 +60,29 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="SPARK_LINE", index=1, number=1, serialized_options=None, type=None + name="SPARK_LINE", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="SPARK_BAR", index=2, number=2, serialized_options=None, type=None + name="SPARK_BAR", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, serialized_options=None, - serialized_start=1599, - serialized_end=1680, + serialized_start=1721, + serialized_end=1802, ) _sym_db.RegisterEnumDescriptor(_SPARKCHARTTYPE) @@ -91,6 +97,7 @@ full_name="google.monitoring.dashboard.v1.Threshold.Color", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="COLOR_UNSPECIFIED", @@ -98,18 +105,29 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="YELLOW", index=1, number=4, serialized_options=None, type=None + name="YELLOW", + index=1, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="RED", index=2, number=6, serialized_options=None, type=None + name="RED", + index=2, + number=6, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, serialized_options=None, - serialized_start=1484, - serialized_end=1535, + serialized_start=1606, + serialized_end=1657, ) _sym_db.RegisterEnumDescriptor(_THRESHOLD_COLOR) @@ -118,6 +136,7 @@ full_name="google.monitoring.dashboard.v1.Threshold.Direction", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="DIRECTION_UNSPECIFIED", @@ -125,18 +144,29 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="ABOVE", index=1, number=1, serialized_options=None, type=None + name="ABOVE", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="BELOW", index=2, number=2, serialized_options=None, type=None + name="BELOW", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, serialized_options=None, - serialized_start=1537, - serialized_end=1597, + serialized_start=1659, + serialized_end=1719, ) _sym_db.RegisterEnumDescriptor(_THRESHOLD_DIRECTION) @@ -147,6 +177,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="time_series_filter", @@ -165,6 +196,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="time_series_filter_ratio", @@ -183,17 +215,37 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + 
create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="time_series_query_language", + full_name="google.monitoring.dashboard.v1.TimeSeriesQuery.time_series_query_language", + index=2, + number=3, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="unit_override", full_name="google.monitoring.dashboard.v1.TimeSeriesQuery.unit_override", - index=2, + index=3, number=5, type=9, cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -201,6 +253,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -216,11 +269,12 @@ full_name="google.monitoring.dashboard.v1.TimeSeriesQuery.source", index=0, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[], - ) + ), ], serialized_start=183, - serialized_end=404, + serialized_end=442, ) @@ -230,6 +284,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="filter", @@ -240,14 +295,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="aggregation", @@ -266,11 +322,31 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="secondary_aggregation", + full_name="google.monitoring.dashboard.v1.TimeSeriesFilter.secondary_aggregation", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="pick_time_series_filter", full_name="google.monitoring.dashboard.v1.TimeSeriesFilter.pick_time_series_filter", - index=2, + index=3, number=4, type=11, cpp_type=10, @@ -284,11 +360,12 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="statistical_time_series_filter", full_name="google.monitoring.dashboard.v1.TimeSeriesFilter.statistical_time_series_filter", - index=3, + index=4, number=5, type=11, cpp_type=10, @@ -300,8 +377,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=b"\030\001", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -317,11 +395,12 @@ full_name="google.monitoring.dashboard.v1.TimeSeriesFilter.output_filter", index=0, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[], - ) + ), ], - serialized_start=407, - serialized_end=721, + serialized_start=445, + 
serialized_end=839, ) @@ -331,6 +410,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="filter", @@ -341,14 +421,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\002"), + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="aggregation", @@ -367,6 +448,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -377,8 +459,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1187, - serialized_end=1285, + serialized_start=1309, + serialized_end=1407, ) _TIMESERIESFILTERRATIO = _descriptor.Descriptor( @@ -387,6 +469,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="numerator", @@ -405,6 +488,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="denominator", @@ -423,6 +507,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="secondary_aggregation", @@ -441,6 +526,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="pick_time_series_filter", @@ -459,6 +545,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="statistical_time_series_filter", @@ -475,12 +562,13 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=b"\030\001", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], - nested_types=[_TIMESERIESFILTERRATIO_RATIOPART], + nested_types=[_TIMESERIESFILTERRATIO_RATIOPART,], enum_types=[], serialized_options=None, is_extendable=False, @@ -492,11 +580,12 @@ full_name="google.monitoring.dashboard.v1.TimeSeriesFilterRatio.output_filter", index=0, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[], - ) + ), ], - serialized_start=724, - serialized_end=1302, + serialized_start=842, + serialized_end=1424, ) @@ -506,6 +595,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="label", @@ -516,7 +606,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -524,6 +614,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="value", @@ -542,6 +633,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="color", @@ -560,6 +652,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="direction", @@ 
-578,18 +671,19 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], - enum_types=[_THRESHOLD_COLOR, _THRESHOLD_DIRECTION], + enum_types=[_THRESHOLD_COLOR, _THRESHOLD_DIRECTION,], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1305, - serialized_end=1597, + serialized_start=1427, + serialized_end=1719, ) _TIMESERIESQUERY.fields_by_name["time_series_filter"].message_type = _TIMESERIESFILTER @@ -608,20 +702,31 @@ _TIMESERIESQUERY.fields_by_name[ "time_series_filter_ratio" ].containing_oneof = _TIMESERIESQUERY.oneofs_by_name["source"] +_TIMESERIESQUERY.oneofs_by_name["source"].fields.append( + _TIMESERIESQUERY.fields_by_name["time_series_query_language"] +) +_TIMESERIESQUERY.fields_by_name[ + "time_series_query_language" +].containing_oneof = _TIMESERIESQUERY.oneofs_by_name["source"] _TIMESERIESFILTER.fields_by_name[ "aggregation" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_common__pb2._AGGREGATION + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._AGGREGATION +) +_TIMESERIESFILTER.fields_by_name[ + "secondary_aggregation" +].message_type = ( + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._AGGREGATION ) _TIMESERIESFILTER.fields_by_name[ "pick_time_series_filter" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_common__pb2._PICKTIMESERIESFILTER + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._PICKTIMESERIESFILTER ) _TIMESERIESFILTER.fields_by_name[ "statistical_time_series_filter" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_common__pb2._STATISTICALTIMESERIESFILTER + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._STATISTICALTIMESERIESFILTER ) _TIMESERIESFILTER.oneofs_by_name["output_filter"].fields.append( _TIMESERIESFILTER.fields_by_name["pick_time_series_filter"] @@ -638,7 +743,7 @@ _TIMESERIESFILTERRATIO_RATIOPART.fields_by_name[ "aggregation" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_common__pb2._AGGREGATION + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._AGGREGATION ) _TIMESERIESFILTERRATIO_RATIOPART.containing_type = _TIMESERIESFILTERRATIO _TIMESERIESFILTERRATIO.fields_by_name[ @@ -650,17 +755,17 @@ _TIMESERIESFILTERRATIO.fields_by_name[ "secondary_aggregation" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_common__pb2._AGGREGATION + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._AGGREGATION ) _TIMESERIESFILTERRATIO.fields_by_name[ "pick_time_series_filter" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_common__pb2._PICKTIMESERIESFILTER + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._PICKTIMESERIESFILTER ) _TIMESERIESFILTERRATIO.fields_by_name[ "statistical_time_series_filter" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_common__pb2._STATISTICALTIMESERIESFILTER + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._STATISTICALTIMESERIESFILTER ) _TIMESERIESFILTERRATIO.oneofs_by_name["output_filter"].fields.append( _TIMESERIESFILTERRATIO.fields_by_name["pick_time_series_filter"] @@ -690,10 +795,9 @@ (_message.Message,), { "DESCRIPTOR": 
_TIMESERIESQUERY, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.metrics_pb2", - "__doc__": """TimeSeriesQuery collects the set of supported methods for - querying time series data from the Stackdriver metrics API. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.metrics_pb2", + "__doc__": """TimeSeriesQuery collects the set of supported methods for querying + time series data from the Stackdriver metrics API. Attributes: source: @@ -702,12 +806,14 @@ Filter parameters to fetch time series. time_series_filter_ratio: Parameters to fetch a ratio between two time series filters. + time_series_query_language: + A query used to fetch time series. unit_override: The unit of data contained in fetched time series. If non- empty, this unit will override any unit that accompanies - fetched data. The format is the same as the ```unit`` - `__ - field in ``MetricDescriptor``. + fetched data. The format is the same as the ```unit`` `__ field in ``MetricDescriptor``. """, # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.TimeSeriesQuery) }, @@ -719,28 +825,30 @@ (_message.Message,), { "DESCRIPTOR": _TIMESERIESFILTER, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.metrics_pb2", - "__doc__": """A filter that defines a subset of time series data that is - displayed in a widget. Time series data is fetched using the - ```ListTimeSeries`` `__ - method. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.metrics_pb2", + "__doc__": """A filter that defines a subset of time series data that is displayed + in a widget. Time series data is fetched using the ```ListTimeSeries`` + `__ method. Attributes: filter: Required. The `monitoring filter - `__ that identifies the metric - types, resources, and projects to query. + `__ that + identifies the metric types, resources, and projects to query. aggregation: By default, the raw time series data is returned. Use this field to combine multiple time series for different views of the data. + secondary_aggregation: + Apply a second aggregation after ``aggregation`` is applied. output_filter: Selects an optional time series filter. pick_time_series_filter: Ranking based time series filter. statistical_time_series_filter: - Statistics based time series filter. + Statistics based time series filter. Note: This field is + deprecated and completely ignored by the API. """, # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.TimeSeriesFilter) }, @@ -756,16 +864,15 @@ (_message.Message,), { "DESCRIPTOR": _TIMESERIESFILTERRATIO_RATIOPART, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.metrics_pb2", - "__doc__": """Describes a query to build the numerator or denominator of - a TimeSeriesFilterRatio. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.metrics_pb2", + "__doc__": """Describes a query to build the numerator or denominator of a + TimeSeriesFilterRatio. Attributes: filter: Required. The `monitoring filter - `__ that identifies the metric - types, resources, and projects to query. + `__ that + identifies the metric types, resources, and projects to query. aggregation: By default, the raw time series data is returned. Use this field to combine multiple time series for different views of @@ -775,11 +882,10 @@ }, ), "DESCRIPTOR": _TIMESERIESFILTERRATIO, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.metrics_pb2", - "__doc__": """A pair of time series filters that define a ratio - computation. 
The output time series is the pair-wise division of each - aligned element from the numerator and denominator time series. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.metrics_pb2", + "__doc__": """A pair of time series filters that define a ratio computation. The + output time series is the pair-wise division of each aligned element + from the numerator and denominator time series. Attributes: numerator: @@ -794,7 +900,8 @@ pick_time_series_filter: Ranking based time series filter. statistical_time_series_filter: - Statistics based time series filter. + Statistics based time series filter. Note: This field is + deprecated and completely ignored by the API. """, # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.TimeSeriesFilterRatio) }, @@ -807,10 +914,9 @@ (_message.Message,), { "DESCRIPTOR": _THRESHOLD, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.metrics_pb2", + "__module__": "google.cloud.monitoring_dashboard.v1.proto.metrics_pb2", "__doc__": """Defines a threshold for categorizing time series values. - Attributes: label: A label for the threshold. @@ -832,5 +938,7 @@ DESCRIPTOR._options = None _TIMESERIESFILTER.fields_by_name["filter"]._options = None +_TIMESERIESFILTER.fields_by_name["statistical_time_series_filter"]._options = None _TIMESERIESFILTERRATIO_RATIOPART.fields_by_name["filter"]._options = None +_TIMESERIESFILTERRATIO.fields_by_name["statistical_time_series_filter"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2_grpc.py index 7343170..b662812 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2_grpc.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/monitoring_dashboard/v1/proto/scorecard.proto b/google/cloud/monitoring_dashboard/v1/proto/scorecard.proto new file mode 100644 index 0000000..1820c03 --- /dev/null +++ b/google/cloud/monitoring_dashboard/v1/proto/scorecard.proto @@ -0,0 +1,111 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/field_behavior.proto"; +import "google/monitoring/dashboard/v1/metrics.proto"; +import "google/protobuf/duration.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "ScorecardProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// A widget showing the latest value of a metric, and how this value relates to +// one or more thresholds. 
+message Scorecard { + // A gauge chart shows where the current value sits within a pre-defined + // range. The upper and lower bounds should define the possible range of + // values for the scorecard's query (inclusive). + message GaugeView { + // The lower bound for this gauge chart. The value of the chart should + // always be greater than or equal to this. + double lower_bound = 1; + + // The upper bound for this gauge chart. The value of the chart should + // always be less than or equal to this. + double upper_bound = 2; + } + + // A sparkChart is a small chart suitable for inclusion in a table-cell or + // inline in text. This message contains the configuration for a sparkChart + // to show up on a Scorecard, showing recent trends of the scorecard's + // timeseries. + message SparkChartView { + // Required. The type of sparkchart to show in this chartView. + SparkChartType spark_chart_type = 1 [(google.api.field_behavior) = REQUIRED]; + + // The lower bound on data point frequency in the chart implemented by + // specifying the minimum alignment period to use in a time series query. + // For example, if the data is published once every 10 minutes it would not + // make sense to fetch and align data at one minute intervals. This field is + // optional and exists only as a hint. + google.protobuf.Duration min_alignment_period = 2; + } + + // Required. Fields for querying time series data from the + // Stackdriver metrics API. + TimeSeriesQuery time_series_query = 1 [(google.api.field_behavior) = REQUIRED]; + + // Defines the optional additional chart shown on the scorecard. If + // neither is included - then a default scorecard is shown. + oneof data_view { + // Will cause the scorecard to show a gauge chart. + GaugeView gauge_view = 4; + + // Will cause the scorecard to show a spark chart. + SparkChartView spark_chart_view = 5; + } + + // The thresholds used to determine the state of the scorecard given the + // time series' current value. For an actual value x, the scorecard is in a + // danger state if x is less than or equal to a danger threshold that triggers + // below, or greater than or equal to a danger threshold that triggers above. + // Similarly, if x is above/below a warning threshold that triggers + // above/below, then the scorecard is in a warning state - unless x also puts + // it in a danger state. (Danger trumps warning.) + // + // As an example, consider a scorecard with the following four thresholds: + // { + // value: 90, + // category: 'DANGER', + // trigger: 'ABOVE', + // }, + // { + // value: 70, + // category: 'WARNING', + // trigger: 'ABOVE', + // }, + // { + // value: 10, + // category: 'DANGER', + // trigger: 'BELOW', + // }, + // { + // value: 20, + // category: 'WARNING', + // trigger: 'BELOW', + // } + // + // Then: values less than or equal to 10 would put the scorecard in a DANGER + // state, values greater than 10 but less than or equal to 20 a WARNING state, + // values strictly between 20 and 70 an OK state, values greater than or equal + // to 70 but less than 90 a WARNING state, and values greater than or equal to + // 90 a DANGER state. 
+ repeated Threshold thresholds = 6; +} diff --git a/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2.py index 336ccd6..be5f60c 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2.py +++ b/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,8 @@ # limitations under the License. # Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring/dashboard_v1/proto/scorecard.proto +# source: google/cloud/monitoring_dashboard_v1/proto/scorecard.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -30,24 +27,23 @@ _sym_db = _symbol_database.Default() +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.monitoring_dashboard.v1.proto import ( - metrics_pb2 as google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_metrics__pb2, + metrics_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2, ) from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring/dashboard_v1/proto/scorecard.proto", + name="google/cloud/monitoring_dashboard_v1/proto/scorecard.proto", package="google.monitoring.dashboard.v1", syntax="proto3", - serialized_options=_b( - '\n"com.google.monitoring.dashboard.v1B\016ScorecardProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard' - ), - serialized_pb=_b( - '\n:google/cloud/monitoring/dashboard_v1/proto/scorecard.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x38google/cloud/monitoring/dashboard_v1/proto/metrics.proto\x1a\x1egoogle/protobuf/duration.proto"\x91\x04\n\tScorecard\x12J\n\x11time_series_query\x18\x01 \x01(\x0b\x32/.google.monitoring.dashboard.v1.TimeSeriesQuery\x12I\n\ngauge_view\x18\x04 \x01(\x0b\x32\x33.google.monitoring.dashboard.v1.Scorecard.GaugeViewH\x00\x12T\n\x10spark_chart_view\x18\x05 \x01(\x0b\x32\x38.google.monitoring.dashboard.v1.Scorecard.SparkChartViewH\x00\x12=\n\nthresholds\x18\x06 \x03(\x0b\x32).google.monitoring.dashboard.v1.Threshold\x1a\x35\n\tGaugeView\x12\x13\n\x0blower_bound\x18\x01 \x01(\x01\x12\x13\n\x0bupper_bound\x18\x02 \x01(\x01\x1a\x93\x01\n\x0eSparkChartView\x12H\n\x10spark_chart_type\x18\x01 \x01(\x0e\x32..google.monitoring.dashboard.v1.SparkChartType\x12\x37\n\x14min_alignment_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x0b\n\tdata_viewB\x7f\n"com.google.monitoring.dashboard.v1B\x0eScorecardProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboardb\x06proto3' - ), + serialized_options=b'\n"com.google.monitoring.dashboard.v1B\016ScorecardProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', + create_key=_descriptor._internal_create_key, + 
serialized_pb=b'\n:google/cloud/monitoring_dashboard_v1/proto/scorecard.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x38google/cloud/monitoring_dashboard_v1/proto/metrics.proto\x1a\x1egoogle/protobuf/duration.proto"\x9b\x04\n\tScorecard\x12O\n\x11time_series_query\x18\x01 \x01(\x0b\x32/.google.monitoring.dashboard.v1.TimeSeriesQueryB\x03\xe0\x41\x02\x12I\n\ngauge_view\x18\x04 \x01(\x0b\x32\x33.google.monitoring.dashboard.v1.Scorecard.GaugeViewH\x00\x12T\n\x10spark_chart_view\x18\x05 \x01(\x0b\x32\x38.google.monitoring.dashboard.v1.Scorecard.SparkChartViewH\x00\x12=\n\nthresholds\x18\x06 \x03(\x0b\x32).google.monitoring.dashboard.v1.Threshold\x1a\x35\n\tGaugeView\x12\x13\n\x0blower_bound\x18\x01 \x01(\x01\x12\x13\n\x0bupper_bound\x18\x02 \x01(\x01\x1a\x98\x01\n\x0eSparkChartView\x12M\n\x10spark_chart_type\x18\x01 \x01(\x0e\x32..google.monitoring.dashboard.v1.SparkChartTypeB\x03\xe0\x41\x02\x12\x37\n\x14min_alignment_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x0b\n\tdata_viewB\xaa\x01\n"com.google.monitoring.dashboard.v1B\x0eScorecardProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', dependencies=[ - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_metrics__pb2.DESCRIPTOR, + google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, ], ) @@ -59,6 +55,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="lower_bound", @@ -77,6 +74,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="upper_bound", @@ -95,6 +93,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -105,8 +104,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=498, - serialized_end=551, + serialized_start=536, + serialized_end=589, ) _SCORECARD_SPARKCHARTVIEW = _descriptor.Descriptor( @@ -115,6 +114,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="spark_chart_type", @@ -131,8 +131,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="min_alignment_period", @@ -151,6 +152,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -161,8 +163,8 @@ syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=554, - serialized_end=701, + serialized_start=592, + serialized_end=744, ) _SCORECARD = _descriptor.Descriptor( @@ -171,6 +173,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="time_series_query", @@ -187,8 +190,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="gauge_view", @@ -207,6 +211,7 @@ 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="spark_chart_view", @@ -225,6 +230,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="thresholds", @@ -243,10 +249,11 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], - nested_types=[_SCORECARD_GAUGEVIEW, _SCORECARD_SPARKCHARTVIEW], + nested_types=[_SCORECARD_GAUGEVIEW, _SCORECARD_SPARKCHARTVIEW,], enum_types=[], serialized_options=None, is_extendable=False, @@ -258,18 +265,19 @@ full_name="google.monitoring.dashboard.v1.Scorecard.data_view", index=0, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[], - ) + ), ], - serialized_start=185, - serialized_end=714, + serialized_start=218, + serialized_end=757, ) _SCORECARD_GAUGEVIEW.containing_type = _SCORECARD _SCORECARD_SPARKCHARTVIEW.fields_by_name[ "spark_chart_type" ].enum_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_metrics__pb2._SPARKCHARTTYPE + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2._SPARKCHARTTYPE ) _SCORECARD_SPARKCHARTVIEW.fields_by_name[ "min_alignment_period" @@ -278,14 +286,14 @@ _SCORECARD.fields_by_name[ "time_series_query" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_metrics__pb2._TIMESERIESQUERY + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2._TIMESERIESQUERY ) _SCORECARD.fields_by_name["gauge_view"].message_type = _SCORECARD_GAUGEVIEW _SCORECARD.fields_by_name["spark_chart_view"].message_type = _SCORECARD_SPARKCHARTVIEW _SCORECARD.fields_by_name[ "thresholds" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_metrics__pb2._THRESHOLD + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2._THRESHOLD ) _SCORECARD.oneofs_by_name["data_view"].fields.append( _SCORECARD.fields_by_name["gauge_view"] @@ -311,11 +319,10 @@ (_message.Message,), { "DESCRIPTOR": _SCORECARD_GAUGEVIEW, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.scorecard_pb2", - "__doc__": """A gauge chart shows where the current value sits within a - pre-defined range. The upper and lower bounds should define the possible - range of values for the scorecard’s query (inclusive). - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.scorecard_pb2", + "__doc__": """A gauge chart shows where the current value sits within a pre-defined + range. The upper and lower bounds should define the possible range of + values for the scorecard’s query (inclusive). Attributes: lower_bound: @@ -333,16 +340,15 @@ (_message.Message,), { "DESCRIPTOR": _SCORECARD_SPARKCHARTVIEW, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.scorecard_pb2", - "__doc__": """A sparkChart is a small chart suitable for inclusion in a - table-cell or inline in text. This message contains the configuration - for a sparkChart to show up on a Scorecard, showing recent trends of the + "__module__": "google.cloud.monitoring_dashboard.v1.proto.scorecard_pb2", + "__doc__": """A sparkChart is a small chart suitable for inclusion in a table-cell + or inline in text. This message contains the configuration for a + sparkChart to show up on a Scorecard, showing recent trends of the scorecard’s timeseries. 
- Attributes: spark_chart_type: - The type of sparkchart to show in this chartView. + Required. The type of sparkchart to show in this chartView. min_alignment_period: The lower bound on data point frequency in the chart implemented by specifying the minimum alignment period to use @@ -355,15 +361,14 @@ }, ), "DESCRIPTOR": _SCORECARD, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.scorecard_pb2", - "__doc__": """A widget showing the latest value of a metric, and how - this value relates to one or more thresholds. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.scorecard_pb2", + "__doc__": """A widget showing the latest value of a metric, and how this value + relates to one or more thresholds. Attributes: time_series_query: - Fields for querying time series data from the Stackdriver - metrics API. + Required. Fields for querying time series data from the + Stackdriver metrics API. data_view: Defines the optional additional chart shown on the scorecard. If neither is included - then a default scorecard is shown. @@ -400,4 +405,6 @@ DESCRIPTOR._options = None +_SCORECARD_SPARKCHARTVIEW.fields_by_name["spark_chart_type"]._options = None +_SCORECARD.fields_by_name["time_series_query"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2_grpc.py index 7343170..b662812 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2_grpc.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/monitoring_dashboard/v1/proto/service.proto b/google/cloud/monitoring_dashboard/v1/proto/service.proto new file mode 100644 index 0000000..5bb41ec --- /dev/null +++ b/google/cloud/monitoring_dashboard/v1/proto/service.proto @@ -0,0 +1,23 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "ServiceMonitoringProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; diff --git a/google/cloud/monitoring_dashboard/v1/proto/service_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/service_pb2.py index cb61c0f..05d5e26 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/service_pb2.py +++ b/google/cloud/monitoring_dashboard/v1/proto/service_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,8 @@ # limitations under the License. # Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring/dashboard_v1/proto/service.proto +# source: google/cloud/monitoring_dashboard_v1/proto/service.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -31,15 +28,12 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring/dashboard_v1/proto/service.proto", + name="google/cloud/monitoring_dashboard_v1/proto/service.proto", package="google.monitoring.dashboard.v1", syntax="proto3", - serialized_options=_b( - '\n"com.google.monitoring.dashboard.v1B\026ServiceMonitoringProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard' - ), - serialized_pb=_b( - '\n8google/cloud/monitoring/dashboard_v1/proto/service.proto\x12\x1egoogle.monitoring.dashboard.v1B\x87\x01\n"com.google.monitoring.dashboard.v1B\x16ServiceMonitoringProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboardb\x06proto3' - ), + serialized_options=b'\n"com.google.monitoring.dashboard.v1B\026ServiceMonitoringProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n8google/cloud/monitoring_dashboard_v1/proto/service.proto\x12\x1egoogle.monitoring.dashboard.v1B\xb2\x01\n"com.google.monitoring.dashboard.v1B\x16ServiceMonitoringProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', ) diff --git a/google/cloud/monitoring_dashboard/v1/proto/service_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/service_pb2_grpc.py index 7343170..b662812 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/service_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard/v1/proto/service_pb2_grpc.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
diff --git a/google/cloud/monitoring_dashboard/v1/proto/text.proto b/google/cloud/monitoring_dashboard/v1/proto/text.proto new file mode 100644 index 0000000..acc1671 --- /dev/null +++ b/google/cloud/monitoring_dashboard/v1/proto/text.proto @@ -0,0 +1,44 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "TextProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// A widget that displays textual content. +message Text { + // The format type of the text content. + enum Format { + // Format is unspecified. Defaults to MARKDOWN. + FORMAT_UNSPECIFIED = 0; + + // The text contains Markdown formatting. + MARKDOWN = 1; + + // The text contains no special formatting. + RAW = 2; + } + + // The text content to be displayed. + string content = 1; + + // How the text content is formatted. + Format format = 2; +} diff --git a/google/cloud/monitoring_dashboard/v1/proto/text_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/text_pb2.py index c707698..d121080 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/text_pb2.py +++ b/google/cloud/monitoring_dashboard/v1/proto/text_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,8 @@ # limitations under the License. # Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring/dashboard_v1/proto/text.proto +# source: google/cloud/monitoring_dashboard_v1/proto/text.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -31,15 +28,12 @@ DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring/dashboard_v1/proto/text.proto", + name="google/cloud/monitoring_dashboard_v1/proto/text.proto", package="google.monitoring.dashboard.v1", syntax="proto3", - serialized_options=_b( - '\n"com.google.monitoring.dashboard.v1B\tTextProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard' - ), - serialized_pb=_b( - '\n5google/cloud/monitoring/dashboard_v1/proto/text.proto\x12\x1egoogle.monitoring.dashboard.v1"\x8d\x01\n\x04Text\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12;\n\x06\x66ormat\x18\x02 \x01(\x0e\x32+.google.monitoring.dashboard.v1.Text.Format"7\n\x06\x46ormat\x12\x16\n\x12\x46ORMAT_UNSPECIFIED\x10\x00\x12\x0c\n\x08MARKDOWN\x10\x01\x12\x07\n\x03RAW\x10\x02\x42z\n"com.google.monitoring.dashboard.v1B\tTextProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboardb\x06proto3' - ), + serialized_options=b'\n"com.google.monitoring.dashboard.v1B\tTextProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n5google/cloud/monitoring_dashboard_v1/proto/text.proto\x12\x1egoogle.monitoring.dashboard.v1"\x8d\x01\n\x04Text\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12;\n\x06\x66ormat\x18\x02 \x01(\x0e\x32+.google.monitoring.dashboard.v1.Text.Format"7\n\x06\x46ormat\x12\x16\n\x12\x46ORMAT_UNSPECIFIED\x10\x00\x12\x0c\n\x08MARKDOWN\x10\x01\x12\x07\n\x03RAW\x10\x02\x42\xa5\x01\n"com.google.monitoring.dashboard.v1B\tTextProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', ) @@ -48,6 +42,7 @@ full_name="google.monitoring.dashboard.v1.Text.Format", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="FORMAT_UNSPECIFIED", @@ -55,12 +50,23 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="MARKDOWN", index=1, number=1, serialized_options=None, type=None + name="MARKDOWN", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="RAW", index=2, number=2, serialized_options=None, type=None + name="RAW", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, @@ -77,6 +83,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="content", @@ -87,7 +94,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -95,6 +102,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="format", @@ -113,11 +121,12 @@ 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], - enum_types=[_TEXT_FORMAT], + enum_types=[_TEXT_FORMAT,], serialized_options=None, is_extendable=False, syntax="proto3", @@ -137,10 +146,9 @@ (_message.Message,), { "DESCRIPTOR": _TEXT, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.text_pb2", + "__module__": "google.cloud.monitoring_dashboard.v1.proto.text_pb2", "__doc__": """A widget that displays textual content. - Attributes: content: The text content to be displayed. diff --git a/google/cloud/monitoring_dashboard/v1/proto/text_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/text_pb2_grpc.py index 7343170..b662812 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/text_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard/v1/proto/text_pb2_grpc.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/monitoring_dashboard/v1/proto/widget.proto b/google/cloud/monitoring_dashboard/v1/proto/widget.proto new file mode 100644 index 0000000..12b5a69 --- /dev/null +++ b/google/cloud/monitoring_dashboard/v1/proto/widget.proto @@ -0,0 +1,51 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/field_behavior.proto"; +import "google/monitoring/dashboard/v1/scorecard.proto"; +import "google/monitoring/dashboard/v1/text.proto"; +import "google/monitoring/dashboard/v1/xychart.proto"; +import "google/protobuf/empty.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "WidgetProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// Widget contains a single dashboard component and configuration of how to +// present the component in the dashboard. +message Widget { + // Optional. The title of the widget. + string title = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Content defines the component used to populate the widget. + oneof content { + // A chart of time series data. + XyChart xy_chart = 2; + + // A scorecard summarizing time series data. + Scorecard scorecard = 3; + + // A raw string or markdown displaying textual content. + Text text = 4; + + // A blank space. 
+ google.protobuf.Empty blank = 5; + } +} diff --git a/google/cloud/monitoring_dashboard/v1/proto/widget_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/widget_pb2.py index ad2df70..881e922 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/widget_pb2.py +++ b/google/cloud/monitoring_dashboard/v1/proto/widget_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,8 @@ # limitations under the License. # Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring/dashboard_v1/proto/widget.proto +# source: google/cloud/monitoring_dashboard_v1/proto/widget.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -32,32 +29,29 @@ from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.monitoring_dashboard.v1.proto import ( - scorecard_pb2 as google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_scorecard__pb2, + scorecard_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_scorecard__pb2, ) from google.cloud.monitoring_dashboard.v1.proto import ( - text_pb2 as google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_text__pb2, + text_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_text__pb2, ) from google.cloud.monitoring_dashboard.v1.proto import ( - xychart_pb2 as google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_xychart__pb2, + xychart_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_xychart__pb2, ) from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring/dashboard_v1/proto/widget.proto", + name="google/cloud/monitoring_dashboard_v1/proto/widget.proto", package="google.monitoring.dashboard.v1", syntax="proto3", - serialized_options=_b( - '\n"com.google.monitoring.dashboard.v1B\013WidgetProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard' - ), - serialized_pb=_b( - "\n7google/cloud/monitoring/dashboard_v1/proto/widget.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a:google/cloud/monitoring/dashboard_v1/proto/scorecard.proto\x1a\x35google/cloud/monitoring/dashboard_v1/proto/text.proto\x1a\x38google/cloud/monitoring/dashboard_v1/proto/xychart.proto\x1a\x1bgoogle/protobuf/empty.proto\"\x83\x02\n\x06Widget\x12\x12\n\x05title\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12;\n\x08xy_chart\x18\x02 \x01(\x0b\x32'.google.monitoring.dashboard.v1.XyChartH\x00\x12>\n\tscorecard\x18\x03 \x01(\x0b\x32).google.monitoring.dashboard.v1.ScorecardH\x00\x12\x34\n\x04text\x18\x04 \x01(\x0b\x32$.google.monitoring.dashboard.v1.TextH\x00\x12'\n\x05\x62lank\x18\x05 \x01(\x0b\x32\x16.google.protobuf.EmptyH\x00\x42\t\n\x07\x63ontentB|\n\"com.google.monitoring.dashboard.v1B\x0bWidgetProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboardb\x06proto3" - ), + serialized_options=b'\n"com.google.monitoring.dashboard.v1B\013WidgetProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', + 
create_key=_descriptor._internal_create_key, + serialized_pb=b"\n7google/cloud/monitoring_dashboard_v1/proto/widget.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a:google/cloud/monitoring_dashboard_v1/proto/scorecard.proto\x1a\x35google/cloud/monitoring_dashboard_v1/proto/text.proto\x1a\x38google/cloud/monitoring_dashboard_v1/proto/xychart.proto\x1a\x1bgoogle/protobuf/empty.proto\"\x83\x02\n\x06Widget\x12\x12\n\x05title\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12;\n\x08xy_chart\x18\x02 \x01(\x0b\x32'.google.monitoring.dashboard.v1.XyChartH\x00\x12>\n\tscorecard\x18\x03 \x01(\x0b\x32).google.monitoring.dashboard.v1.ScorecardH\x00\x12\x34\n\x04text\x18\x04 \x01(\x0b\x32$.google.monitoring.dashboard.v1.TextH\x00\x12'\n\x05\x62lank\x18\x05 \x01(\x0b\x32\x16.google.protobuf.EmptyH\x00\x42\t\n\x07\x63ontentB\xa7\x01\n\"com.google.monitoring.dashboard.v1B\x0bWidgetProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3", dependencies=[ google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_scorecard__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_text__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_xychart__pb2.DESCRIPTOR, + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_scorecard__pb2.DESCRIPTOR, + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_text__pb2.DESCRIPTOR, + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_xychart__pb2.DESCRIPTOR, google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, ], ) @@ -69,6 +63,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="title", @@ -79,14 +74,15 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\001"), + serialized_options=b"\340A\001", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="xy_chart", @@ -105,6 +101,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="scorecard", @@ -123,6 +120,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="text", @@ -141,6 +139,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="blank", @@ -159,6 +158,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], @@ -174,8 +174,9 @@ full_name="google.monitoring.dashboard.v1.Widget.content", index=0, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[], - ) + ), ], serialized_start=327, serialized_end=586, @@ -184,17 +185,17 @@ _WIDGET.fields_by_name[ "xy_chart" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_xychart__pb2._XYCHART + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_xychart__pb2._XYCHART ) _WIDGET.fields_by_name[ "scorecard" ].message_type = ( - 
google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_scorecard__pb2._SCORECARD + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_scorecard__pb2._SCORECARD ) _WIDGET.fields_by_name[ "text" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_text__pb2._TEXT + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_text__pb2._TEXT ) _WIDGET.fields_by_name["blank"].message_type = google_dot_protobuf_dot_empty__pb2._EMPTY _WIDGET.oneofs_by_name["content"].fields.append(_WIDGET.fields_by_name["xy_chart"]) @@ -213,10 +214,9 @@ (_message.Message,), { "DESCRIPTOR": _WIDGET, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.widget_pb2", - "__doc__": """Widget contains a single dashboard component and - configuration of how to present the component in the dashboard. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.widget_pb2", + "__doc__": """Widget contains a single dashboard component and configuration of how + to present the component in the dashboard. Attributes: title: diff --git a/google/cloud/monitoring_dashboard/v1/proto/widget_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/widget_pb2_grpc.py index 7343170..b662812 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/widget_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard/v1/proto/widget_pb2_grpc.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/monitoring_dashboard/v1/proto/xychart.proto b/google/cloud/monitoring_dashboard/v1/proto/xychart.proto new file mode 100644 index 0000000..d241ae7 --- /dev/null +++ b/google/cloud/monitoring_dashboard/v1/proto/xychart.proto @@ -0,0 +1,146 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/field_behavior.proto"; +import "google/monitoring/dashboard/v1/metrics.proto"; +import "google/protobuf/duration.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "XyChartProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// A chart that displays data on a 2D (X and Y axes) plane. +message XyChart { + // Groups a time series query definition with charting options. + message DataSet { + // The types of plotting strategies for data sets. + enum PlotType { + // Plot type is unspecified. The view will default to `LINE`. + PLOT_TYPE_UNSPECIFIED = 0; + + // The data is plotted as a set of lines (one line per series). 
+ LINE = 1; + + // The data is plotted as a set of filled areas (one area per series), + // with the areas stacked vertically (the base of each area is the top of + // its predecessor, and the base of the first area is the X axis). Since + // the areas do not overlap, each is filled with a different opaque color. + STACKED_AREA = 2; + + // The data is plotted as a set of rectangular boxes (one box per series), + // with the boxes stacked vertically (the base of each box is the top of + // its predecessor, and the base of the first box is the X axis). Since + // the boxes do not overlap, each is filled with a different opaque color. + STACKED_BAR = 3; + + // The data is plotted as a heatmap. The series being plotted must have a + // `DISTRIBUTION` value type. The value of each bucket in the distribution + // is displayed as a color. This type is not currently available in the + // Stackdriver Monitoring application. + HEATMAP = 4; + } + + // Required. Fields for querying time series data from the + // Stackdriver metrics API. + TimeSeriesQuery time_series_query = 1 [(google.api.field_behavior) = REQUIRED]; + + // How this data should be plotted on the chart. + PlotType plot_type = 2; + + // A template string for naming `TimeSeries` in the resulting data set. + // This should be a string with interpolations of the form `${label_name}`, + // which will resolve to the label's value. + string legend_template = 3; + + // Optional. The lower bound on data point frequency for this data set, implemented by + // specifying the minimum alignment period to use in a time series query + // For example, if the data is published once every 10 minutes, the + // `min_alignment_period` should be at least 10 minutes. It would not + // make sense to fetch and align data at one minute intervals. + google.protobuf.Duration min_alignment_period = 4 [(google.api.field_behavior) = OPTIONAL]; + } + + // A chart axis. + message Axis { + // Types of scales used in axes. + enum Scale { + // Scale is unspecified. The view will default to `LINEAR`. + SCALE_UNSPECIFIED = 0; + + // Linear scale. + LINEAR = 1; + + // Logarithmic scale (base 10). + LOG10 = 2; + } + + // The label of the axis. + string label = 1; + + // The axis scale. By default, a linear scale is used. + Scale scale = 2; + } + + // Required. The data displayed in this chart. + repeated DataSet data_sets = 1 [(google.api.field_behavior) = REQUIRED]; + + // The duration used to display a comparison chart. A comparison chart + // simultaneously shows values from two similar-length time periods + // (e.g., week-over-week metrics). + // The duration must be positive, and it can only be applied to charts with + // data sets of LINE plot type. + google.protobuf.Duration timeshift_duration = 4; + + // Threshold lines drawn horizontally across the chart. + repeated Threshold thresholds = 5; + + // The properties applied to the X axis. + Axis x_axis = 6; + + // The properties applied to the Y axis. + Axis y_axis = 7; + + // Display options for the chart. + ChartOptions chart_options = 8; +} + +// Options to control visual rendering of a chart. +message ChartOptions { + // Chart mode options. + enum Mode { + // Mode is unspecified. The view will default to `COLOR`. + MODE_UNSPECIFIED = 0; + + // The chart distinguishes data series using different color. Line + // colors may get reused when there are many lines in the chart. + COLOR = 1; + + // The chart uses the Stackdriver x-ray mode, in which each + // data set is plotted using the same semi-transparent color. 
+ X_RAY = 2; + + // The chart displays statistics such as average, median, 95th percentile, + // and more. + STATS = 3; + } + + // The chart mode. + Mode mode = 1; +} diff --git a/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2.py index d9a37e0..1212df8 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2.py +++ b/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2.py @@ -1,6 +1,6 @@ # -*- coding: utf-8 -*- -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. @@ -15,11 +15,8 @@ # limitations under the License. # Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring/dashboard_v1/proto/xychart.proto +# source: google/cloud/monitoring_dashboard_v1/proto/xychart.proto -import sys - -_b = sys.version_info[0] < 3 and (lambda x: x) or (lambda x: x.encode("latin1")) from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection @@ -32,24 +29,21 @@ from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 from google.cloud.monitoring_dashboard.v1.proto import ( - metrics_pb2 as google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_metrics__pb2, + metrics_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2, ) from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring/dashboard_v1/proto/xychart.proto", + name="google/cloud/monitoring_dashboard_v1/proto/xychart.proto", package="google.monitoring.dashboard.v1", syntax="proto3", - serialized_options=_b( - '\n"com.google.monitoring.dashboard.v1B\014XyChartProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard' - ), - serialized_pb=_b( - '\n8google/cloud/monitoring/dashboard_v1/proto/xychart.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x38google/cloud/monitoring/dashboard_v1/proto/metrics.proto\x1a\x1egoogle/protobuf/duration.proto"\xf3\x06\n\x07XyChart\x12\x42\n\tdata_sets\x18\x01 \x03(\x0b\x32/.google.monitoring.dashboard.v1.XyChart.DataSet\x12\x35\n\x12timeshift_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12=\n\nthresholds\x18\x05 \x03(\x0b\x32).google.monitoring.dashboard.v1.Threshold\x12<\n\x06x_axis\x18\x06 \x01(\x0b\x32,.google.monitoring.dashboard.v1.XyChart.Axis\x12<\n\x06y_axis\x18\x07 \x01(\x0b\x32,.google.monitoring.dashboard.v1.XyChart.Axis\x12\x43\n\rchart_options\x18\x08 \x01(\x0b\x32,.google.monitoring.dashboard.v1.ChartOptions\x1a\xda\x02\n\x07\x44\x61taSet\x12J\n\x11time_series_query\x18\x01 \x01(\x0b\x32/.google.monitoring.dashboard.v1.TimeSeriesQuery\x12K\n\tplot_type\x18\x02 \x01(\x0e\x32\x38.google.monitoring.dashboard.v1.XyChart.DataSet.PlotType\x12\x17\n\x0flegend_template\x18\x03 \x01(\t\x12<\n\x14min_alignment_period\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"_\n\x08PlotType\x12\x19\n\x15PLOT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04LINE\x10\x01\x12\x10\n\x0cSTACKED_AREA\x10\x02\x12\x0f\n\x0bSTACKED_BAR\x10\x03\x12\x0b\n\x07HEATMAP\x10\x04\x1a\x8f\x01\n\x04\x41xis\x12\r\n\x05label\x18\x01 \x01(\t\x12\x41\n\x05scale\x18\x02 
\x01(\x0e\x32\x32.google.monitoring.dashboard.v1.XyChart.Axis.Scale"5\n\x05Scale\x12\x15\n\x11SCALE_UNSPECIFIED\x10\x00\x12\n\n\x06LINEAR\x10\x01\x12\t\n\x05LOG10\x10\x02"\x8e\x01\n\x0c\x43hartOptions\x12?\n\x04mode\x18\x01 \x01(\x0e\x32\x31.google.monitoring.dashboard.v1.ChartOptions.Mode"=\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\t\n\x05\x43OLOR\x10\x01\x12\t\n\x05X_RAY\x10\x02\x12\t\n\x05STATS\x10\x03\x42}\n"com.google.monitoring.dashboard.v1B\x0cXyChartProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboardb\x06proto3' - ), + serialized_options=b'\n"com.google.monitoring.dashboard.v1B\014XyChartProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n8google/cloud/monitoring_dashboard_v1/proto/xychart.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x38google/cloud/monitoring_dashboard_v1/proto/metrics.proto\x1a\x1egoogle/protobuf/duration.proto"\xfd\x06\n\x07XyChart\x12G\n\tdata_sets\x18\x01 \x03(\x0b\x32/.google.monitoring.dashboard.v1.XyChart.DataSetB\x03\xe0\x41\x02\x12\x35\n\x12timeshift_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12=\n\nthresholds\x18\x05 \x03(\x0b\x32).google.monitoring.dashboard.v1.Threshold\x12<\n\x06x_axis\x18\x06 \x01(\x0b\x32,.google.monitoring.dashboard.v1.XyChart.Axis\x12<\n\x06y_axis\x18\x07 \x01(\x0b\x32,.google.monitoring.dashboard.v1.XyChart.Axis\x12\x43\n\rchart_options\x18\x08 \x01(\x0b\x32,.google.monitoring.dashboard.v1.ChartOptions\x1a\xdf\x02\n\x07\x44\x61taSet\x12O\n\x11time_series_query\x18\x01 \x01(\x0b\x32/.google.monitoring.dashboard.v1.TimeSeriesQueryB\x03\xe0\x41\x02\x12K\n\tplot_type\x18\x02 \x01(\x0e\x32\x38.google.monitoring.dashboard.v1.XyChart.DataSet.PlotType\x12\x17\n\x0flegend_template\x18\x03 \x01(\t\x12<\n\x14min_alignment_period\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"_\n\x08PlotType\x12\x19\n\x15PLOT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04LINE\x10\x01\x12\x10\n\x0cSTACKED_AREA\x10\x02\x12\x0f\n\x0bSTACKED_BAR\x10\x03\x12\x0b\n\x07HEATMAP\x10\x04\x1a\x8f\x01\n\x04\x41xis\x12\r\n\x05label\x18\x01 \x01(\t\x12\x41\n\x05scale\x18\x02 \x01(\x0e\x32\x32.google.monitoring.dashboard.v1.XyChart.Axis.Scale"5\n\x05Scale\x12\x15\n\x11SCALE_UNSPECIFIED\x10\x00\x12\n\n\x06LINEAR\x10\x01\x12\t\n\x05LOG10\x10\x02"\x8e\x01\n\x0c\x43hartOptions\x12?\n\x04mode\x18\x01 \x01(\x0e\x32\x31.google.monitoring.dashboard.v1.ChartOptions.Mode"=\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\t\n\x05\x43OLOR\x10\x01\x12\t\n\x05X_RAY\x10\x02\x12\t\n\x05STATS\x10\x03\x42\xa8\x01\n"com.google.monitoring.dashboard.v1B\x0cXyChartProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', dependencies=[ google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_metrics__pb2.DESCRIPTOR, + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2.DESCRIPTOR, google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, ], ) @@ -60,6 +54,7 @@ full_name="google.monitoring.dashboard.v1.XyChart.DataSet.PlotType", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="PLOT_TYPE_UNSPECIFIED", @@ -67,24 +62,45 @@ number=0, serialized_options=None, type=None, + 
create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="LINE", index=1, number=1, serialized_options=None, type=None + name="LINE", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="STACKED_AREA", index=2, number=2, serialized_options=None, type=None + name="STACKED_AREA", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="STACKED_BAR", index=3, number=3, serialized_options=None, type=None + name="STACKED_BAR", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="HEATMAP", index=4, number=4, serialized_options=None, type=None + name="HEATMAP", + index=4, + number=4, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, serialized_options=None, - serialized_start=858, - serialized_end=953, + serialized_start=868, + serialized_end=963, ) _sym_db.RegisterEnumDescriptor(_XYCHART_DATASET_PLOTTYPE) @@ -93,6 +109,7 @@ full_name="google.monitoring.dashboard.v1.XyChart.Axis.Scale", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="SCALE_UNSPECIFIED", @@ -100,18 +117,29 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="LINEAR", index=1, number=1, serialized_options=None, type=None + name="LINEAR", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="LOG10", index=2, number=2, serialized_options=None, type=None + name="LOG10", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, serialized_options=None, - serialized_start=1046, - serialized_end=1099, + serialized_start=1056, + serialized_end=1109, ) _sym_db.RegisterEnumDescriptor(_XYCHART_AXIS_SCALE) @@ -120,6 +148,7 @@ full_name="google.monitoring.dashboard.v1.ChartOptions.Mode", filename=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name="MODE_UNSPECIFIED", @@ -127,21 +156,37 @@ number=0, serialized_options=None, type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="COLOR", index=1, number=1, serialized_options=None, type=None + name="COLOR", + index=1, + number=1, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="X_RAY", index=2, number=2, serialized_options=None, type=None + name="X_RAY", + index=2, + number=2, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), _descriptor.EnumValueDescriptor( - name="STATS", index=3, number=3, serialized_options=None, type=None + name="STATS", + index=3, + number=3, + serialized_options=None, + type=None, + create_key=_descriptor._internal_create_key, ), ], containing_type=None, serialized_options=None, - serialized_start=1183, - serialized_end=1244, + serialized_start=1193, + serialized_end=1254, ) _sym_db.RegisterEnumDescriptor(_CHARTOPTIONS_MODE) @@ -152,6 +197,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + 
create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="time_series_query", @@ -168,8 +214,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="plot_type", @@ -188,6 +235,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="legend_template", @@ -198,7 +246,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -206,6 +254,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="min_alignment_period", @@ -222,20 +271,21 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=_b("\340A\001"), + serialized_options=b"\340A\001", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], - enum_types=[_XYCHART_DATASET_PLOTTYPE], + enum_types=[_XYCHART_DATASET_PLOTTYPE,], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=607, - serialized_end=953, + serialized_start=612, + serialized_end=963, ) _XYCHART_AXIS = _descriptor.Descriptor( @@ -244,6 +294,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="label", @@ -254,7 +305,7 @@ cpp_type=9, label=1, has_default_value=False, - default_value=_b("").decode("utf-8"), + default_value=b"".decode("utf-8"), message_type=None, enum_type=None, containing_type=None, @@ -262,6 +313,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="scale", @@ -280,18 +332,19 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], nested_types=[], - enum_types=[_XYCHART_AXIS_SCALE], + enum_types=[_XYCHART_AXIS_SCALE,], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=956, - serialized_end=1099, + serialized_start=966, + serialized_end=1109, ) _XYCHART = _descriptor.Descriptor( @@ -300,6 +353,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="data_sets", @@ -316,8 +370,9 @@ containing_type=None, is_extension=False, extension_scope=None, - serialized_options=None, + serialized_options=b"\340A\002", file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="timeshift_duration", @@ -336,6 +391,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="thresholds", @@ -354,6 +410,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="x_axis", @@ -372,6 +429,7 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="y_axis", @@ -390,6 +448,7 @@ 
extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), _descriptor.FieldDescriptor( name="chart_options", @@ -408,10 +467,11 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, ), ], extensions=[], - nested_types=[_XYCHART_DATASET, _XYCHART_AXIS], + nested_types=[_XYCHART_DATASET, _XYCHART_AXIS,], enum_types=[], serialized_options=None, is_extendable=False, @@ -419,7 +479,7 @@ extension_ranges=[], oneofs=[], serialized_start=216, - serialized_end=1099, + serialized_end=1109, ) @@ -429,6 +489,7 @@ filename=None, file=DESCRIPTOR, containing_type=None, + create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name="mode", @@ -447,24 +508,25 @@ extension_scope=None, serialized_options=None, file=DESCRIPTOR, - ) + create_key=_descriptor._internal_create_key, + ), ], extensions=[], nested_types=[], - enum_types=[_CHARTOPTIONS_MODE], + enum_types=[_CHARTOPTIONS_MODE,], serialized_options=None, is_extendable=False, syntax="proto3", extension_ranges=[], oneofs=[], - serialized_start=1102, - serialized_end=1244, + serialized_start=1112, + serialized_end=1254, ) _XYCHART_DATASET.fields_by_name[ "time_series_query" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_metrics__pb2._TIMESERIESQUERY + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2._TIMESERIESQUERY ) _XYCHART_DATASET.fields_by_name["plot_type"].enum_type = _XYCHART_DATASET_PLOTTYPE _XYCHART_DATASET.fields_by_name[ @@ -482,7 +544,7 @@ _XYCHART.fields_by_name[ "thresholds" ].message_type = ( - google_dot_cloud_dot_monitoring_dot_dashboard__v1_dot_proto_dot_metrics__pb2._THRESHOLD + google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2._THRESHOLD ) _XYCHART.fields_by_name["x_axis"].message_type = _XYCHART_AXIS _XYCHART.fields_by_name["y_axis"].message_type = _XYCHART_AXIS @@ -502,21 +564,20 @@ (_message.Message,), { "DESCRIPTOR": _XYCHART_DATASET, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.xychart_pb2", - "__doc__": """Groups a time series query definition with charting - options. - + "__module__": "google.cloud.monitoring_dashboard.v1.proto.xychart_pb2", + "__doc__": """Groups a time series query definition with charting options. Attributes: time_series_query: - Fields for querying time series data from the Stackdriver - metrics API. + Required. Fields for querying time series data from the + Stackdriver metrics API. plot_type: How this data should be plotted on the chart. legend_template: A template string for naming ``TimeSeries`` in the resulting data set. This should be a string with interpolations of the - form ${label_name}, which will resolve to the label’s value. + form ``${label_name}``, which will resolve to the label’s + value. min_alignment_period: Optional. The lower bound on data point frequency for this data set, implemented by specifying the minimum alignment @@ -534,10 +595,9 @@ (_message.Message,), { "DESCRIPTOR": _XYCHART_AXIS, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.xychart_pb2", + "__module__": "google.cloud.monitoring_dashboard.v1.proto.xychart_pb2", "__doc__": """A chart axis. - Attributes: label: The label of the axis. 
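Beyond the create_key additions, the field descriptors above now carry google.api.field_behavior annotations: serialized_options=b"\340A\002" marks DataSet.time_series_query and XyChart.data_sets as Required, while b"\340A\001" keeps min_alignment_period Optional, matching the updated docstrings. A minimal construction sketch; the time_series_query_language field (the Monitoring Query Language support this release adds) is assumed from the accompanying metrics.proto, which is not part of this hunk:

    from google.cloud.monitoring_dashboard.v1.proto import metrics_pb2, xychart_pb2

    # Assumption: time_series_query_language is the MQL field on TimeSeriesQuery.
    query = metrics_pb2.TimeSeriesQuery(
        time_series_query_language="fetch gce_instance | metric 'compute.googleapis.com/instance/cpu/utilization'"
    )
    chart = xychart_pb2.XyChart(
        data_sets=[
            xychart_pb2.XyChart.DataSet(
                time_series_query=query,
                plot_type=xychart_pb2.XyChart.DataSet.LINE,
            )
        ]
    )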
@@ -548,13 +608,12 @@ }, ), "DESCRIPTOR": _XYCHART, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.xychart_pb2", + "__module__": "google.cloud.monitoring_dashboard.v1.proto.xychart_pb2", "__doc__": """A chart that displays data on a 2D (X and Y axes) plane. - Attributes: data_sets: - The data displayed in this chart. + Required. The data displayed in this chart. timeshift_duration: The duration used to display a comparison chart. A comparison chart simultaneously shows values from two similar-length time @@ -582,10 +641,9 @@ (_message.Message,), { "DESCRIPTOR": _CHARTOPTIONS, - "__module__": "google.cloud.monitoring.dashboard_v1.proto.xychart_pb2", + "__module__": "google.cloud.monitoring_dashboard.v1.proto.xychart_pb2", "__doc__": """Options to control visual rendering of a chart. - Attributes: mode: The chart mode. @@ -597,5 +655,7 @@ DESCRIPTOR._options = None +_XYCHART_DATASET.fields_by_name["time_series_query"]._options = None _XYCHART_DATASET.fields_by_name["min_alignment_period"]._options = None +_XYCHART.fields_by_name["data_sets"]._options = None # @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2_grpc.py index 7343170..b662812 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2_grpc.py @@ -1,4 +1,4 @@ -# Copyright 2019 Google LLC +# Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. diff --git a/google/cloud/monitoring_dashboard/v1/types.py b/google/cloud/monitoring_dashboard/v1/types.py index 8d75f39..3e20510 100644 --- a/google/cloud/monitoring_dashboard/v1/types.py +++ b/google/cloud/monitoring_dashboard/v1/types.py @@ -33,7 +33,10 @@ from google.protobuf import empty_pb2 -_shared_modules = [duration_pb2, empty_pb2] +_shared_modules = [ + duration_pb2, + empty_pb2, +] _local_modules = [ common_pb2, diff --git a/noxfile.py b/noxfile.py index 8036518..248fcd3 100644 --- a/noxfile.py +++ b/noxfile.py @@ -23,14 +23,15 @@ import nox -BLACK_VERSION = "black==19.3b0" +BLACK_VERSION = "black==19.10b0" BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] -if os.path.exists("samples"): - BLACK_PATHS.append("samples") +DEFAULT_PYTHON_VERSION = "3.8" +SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint(session): """Run linters. @@ -38,7 +39,9 @@ def lint(session): serious code quality issues. """ session.install("flake8", BLACK_VERSION) - session.run("black", "--check", *BLACK_PATHS) + session.run( + "black", "--check", *BLACK_PATHS, + ) session.run("flake8", "google", "tests") @@ -53,10 +56,12 @@ def blacken(session): check the state of the `gcp_ubuntu_config` we use for that Kokoro run. 
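The types.py change above is purely cosmetic (a trailing-comma reflow of _shared_modules); the module still aggregates the message classes from the local *_pb2 modules together with the shared duration/empty protos into one namespace. A small sketch of that aggregate surface, assuming the usual GAPIC re-export behavior rather than anything shown verbatim in this diff:

    from google.cloud.monitoring_dashboard.v1 import types
    from google.protobuf import duration_pb2

    # Assumption: XyChart is re-exported on the types module via _local_modules.
    chart = types.XyChart(
        timeshift_duration=duration_pb2.Duration(seconds=3600)
    )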
""" session.install(BLACK_VERSION) - session.run("black", *BLACK_PATHS) + session.run( + "black", *BLACK_PATHS, + ) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def lint_setup_py(session): """Verify that setup.py is valid (including RST check).""" session.install("docutils", "pygments") @@ -84,13 +89,13 @@ def default(session): ) -@nox.session(python=["2.7", "3.5", "3.6", "3.7", "3.8"]) +@nox.session(python=UNIT_TEST_PYTHON_VERSIONS) def unit(session): """Run the unit test suite.""" default(session) -@nox.session(python=["2.7", "3.7"]) +@nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" system_test_path = os.path.join("tests", "system.py") @@ -110,8 +115,9 @@ def system(session): # Install all test dependencies, then install this package into the # virtualenv's dist-packages. - session.install("mock", "pytest") - + session.install( + "mock", "pytest", "google-cloud-testutils", + ) session.install("-e", ".") # Run py.test against the system tests. @@ -121,7 +127,7 @@ def system(session): session.run("py.test", "--quiet", system_test_folder_path, *session.posargs) -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def cover(session): """Run the final coverage report. @@ -134,7 +140,7 @@ def cover(session): session.run("coverage", "erase") -@nox.session(python="3.7") +@nox.session(python=DEFAULT_PYTHON_VERSION) def docs(session): """Build the docs for this library.""" diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh new file mode 100755 index 0000000..ff599eb --- /dev/null +++ b/scripts/decrypt-secrets.sh @@ -0,0 +1,33 @@ +#!/bin/bash + +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" +ROOT=$( dirname "$DIR" ) + +# Work from the project root. +cd $ROOT + +# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources. +PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}" + +gcloud secrets versions access latest --secret="python-docs-samples-test-env" \ + > testing/test-env.sh +gcloud secrets versions access latest \ + --secret="python-docs-samples-service-account" \ + > testing/service-account.json +gcloud secrets versions access latest \ + --secret="python-docs-samples-client-secrets" \ + > testing/client-secrets.json \ No newline at end of file diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py new file mode 100644 index 0000000..d309d6e --- /dev/null +++ b/scripts/readme-gen/readme_gen.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python + +# Copyright 2016 Google Inc +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generates READMEs using configuration defined in yaml.""" + +import argparse +import io +import os +import subprocess + +import jinja2 +import yaml + + +jinja_env = jinja2.Environment( + trim_blocks=True, + loader=jinja2.FileSystemLoader( + os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + +README_TMPL = jinja_env.get_template('README.tmpl.rst') + + +def get_help(file): + return subprocess.check_output(['python', file, '--help']).decode() + + +def main(): + parser = argparse.ArgumentParser() + parser.add_argument('source') + parser.add_argument('--destination', default='README.rst') + + args = parser.parse_args() + + source = os.path.abspath(args.source) + root = os.path.dirname(source) + destination = os.path.join(root, args.destination) + + jinja_env.globals['get_help'] = get_help + + with io.open(source, 'r') as f: + config = yaml.load(f) + + # This allows get_help to execute in the right directory. + os.chdir(root) + + output = README_TMPL.render(config) + + with io.open(destination, 'w') as f: + f.write(output) + + +if __name__ == '__main__': + main() diff --git a/scripts/readme-gen/templates/README.tmpl.rst b/scripts/readme-gen/templates/README.tmpl.rst new file mode 100644 index 0000000..4fd2397 --- /dev/null +++ b/scripts/readme-gen/templates/README.tmpl.rst @@ -0,0 +1,87 @@ +{# The following line is a lie. BUT! Once jinja2 is done with it, it will + become truth! #} +.. This file is automatically generated. Do not edit this file directly. + +{{product.name}} Python Samples +=============================================================================== + +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/README.rst + + +This directory contains samples for {{product.name}}. {{product.description}} + +{{description}} + +.. _{{product.name}}: {{product.url}} + +{% if required_api_url %} +To run the sample, you need to enable the API at: {{required_api_url}} +{% endif %} + +{% if required_role %} +To run the sample, you need to have `{{required_role}}` role. +{% endif %} + +{{other_required_steps}} + +{% if setup %} +Setup +------------------------------------------------------------------------------- + +{% for section in setup %} + +{% include section + '.tmpl.rst' %} + +{% endfor %} +{% endif %} + +{% if samples %} +Samples +------------------------------------------------------------------------------- + +{% for sample in samples %} +{{sample.name}} ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ + +{% if not sample.hide_cloudshell_button %} +.. image:: https://gstatic.com/cloudssh/images/open-btn.png + :target: https://console.cloud.google.com/cloudshell/open?git_repo=https://github.com/GoogleCloudPlatform/python-docs-samples&page=editor&open_in_editor={{folder}}/{{sample.file}},{{folder}}/README.rst +{% endif %} + + +{{sample.description}} + +To run this sample: + +.. 
code-block:: bash + + $ python {{sample.file}} +{% if sample.show_help %} + + {{get_help(sample.file)|indent}} +{% endif %} + + +{% endfor %} +{% endif %} + +{% if cloud_client_library %} + +The client library +------------------------------------------------------------------------------- + +This sample uses the `Google Cloud Client Library for Python`_. +You can read the documentation for more details on API usage and use GitHub +to `browse the source`_ and `report issues`_. + +.. _Google Cloud Client Library for Python: + https://googlecloudplatform.github.io/google-cloud-python/ +.. _browse the source: + https://github.com/GoogleCloudPlatform/google-cloud-python +.. _report issues: + https://github.com/GoogleCloudPlatform/google-cloud-python/issues + +{% endif %} + +.. _Google Cloud SDK: https://cloud.google.com/sdk/ \ No newline at end of file diff --git a/scripts/readme-gen/templates/auth.tmpl.rst b/scripts/readme-gen/templates/auth.tmpl.rst new file mode 100644 index 0000000..1446b94 --- /dev/null +++ b/scripts/readme-gen/templates/auth.tmpl.rst @@ -0,0 +1,9 @@ +Authentication +++++++++++++++ + +This sample requires you to have authentication setup. Refer to the +`Authentication Getting Started Guide`_ for instructions on setting up +credentials for applications. + +.. _Authentication Getting Started Guide: + https://cloud.google.com/docs/authentication/getting-started diff --git a/scripts/readme-gen/templates/auth_api_key.tmpl.rst b/scripts/readme-gen/templates/auth_api_key.tmpl.rst new file mode 100644 index 0000000..11957ce --- /dev/null +++ b/scripts/readme-gen/templates/auth_api_key.tmpl.rst @@ -0,0 +1,14 @@ +Authentication +++++++++++++++ + +Authentication for this service is done via an `API Key`_. To obtain an API +Key: + +1. Open the `Cloud Platform Console`_ +2. Make sure that billing is enabled for your project. +3. From the **Credentials** page, create a new **API Key** or use an existing + one for your project. + +.. _API Key: + https://developers.google.com/api-client-library/python/guide/aaa_apikeys +.. _Cloud Console: https://console.cloud.google.com/project?_ diff --git a/scripts/readme-gen/templates/install_deps.tmpl.rst b/scripts/readme-gen/templates/install_deps.tmpl.rst new file mode 100644 index 0000000..a0406db --- /dev/null +++ b/scripts/readme-gen/templates/install_deps.tmpl.rst @@ -0,0 +1,29 @@ +Install Dependencies +++++++++++++++++++++ + +#. Clone python-docs-samples and change directory to the sample directory you want to use. + + .. code-block:: bash + + $ git clone https://github.com/GoogleCloudPlatform/python-docs-samples.git + +#. Install `pip`_ and `virtualenv`_ if you do not already have them. You may want to refer to the `Python Development Environment Setup Guide`_ for Google Cloud Platform for instructions. + + .. _Python Development Environment Setup Guide: + https://cloud.google.com/python/setup + +#. Create a virtualenv. Samples are compatible with Python 2.7 and 3.4+. + + .. code-block:: bash + + $ virtualenv env + $ source env/bin/activate + +#. Install the dependencies needed to run the samples. + + .. code-block:: bash + + $ pip install -r requirements.txt + +.. _pip: https://pip.pypa.io/ +.. 
_virtualenv: https://virtualenv.pypa.io/ diff --git a/scripts/readme-gen/templates/install_portaudio.tmpl.rst b/scripts/readme-gen/templates/install_portaudio.tmpl.rst new file mode 100644 index 0000000..5ea33d1 --- /dev/null +++ b/scripts/readme-gen/templates/install_portaudio.tmpl.rst @@ -0,0 +1,35 @@ +Install PortAudio ++++++++++++++++++ + +Install `PortAudio`_. This is required by the `PyAudio`_ library to stream +audio from your computer's microphone. PyAudio depends on PortAudio for cross-platform compatibility, and is installed differently depending on the +platform. + +* For Mac OS X, you can use `Homebrew`_:: + + brew install portaudio + + **Note**: if you encounter an error when running `pip install` that indicates + it can't find `portaudio.h`, try running `pip install` with the following + flags:: + + pip install --global-option='build_ext' \ + --global-option='-I/usr/local/include' \ + --global-option='-L/usr/local/lib' \ + pyaudio + +* For Debian / Ubuntu Linux:: + + apt-get install portaudio19-dev python-all-dev + +* Windows may work without having to install PortAudio explicitly (it will get + installed with PyAudio). + +For more details, see the `PyAudio installation`_ page. + + +.. _PyAudio: https://people.csail.mit.edu/hubert/pyaudio/ +.. _PortAudio: http://www.portaudio.com/ +.. _PyAudio installation: + https://people.csail.mit.edu/hubert/pyaudio/#downloads +.. _Homebrew: http://brew.sh diff --git a/setup.cfg b/setup.cfg index 3bd5555..c3a2b39 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,3 +1,19 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + # Generated by synthtool. DO NOT EDIT! 
[bdist_wheel] universal = 1 diff --git a/synth.metadata b/synth.metadata index 6e58f41..ff1feae 100644 --- a/synth.metadata +++ b/synth.metadata @@ -1,20 +1,32 @@ { - "updateTime": "2020-02-25T13:19:54.908974Z", "sources": [ + { + "git": { + "name": ".", + "remote": "https://github.com/googleapis/python-monitoring-dashboards.git", + "sha": "e6c765a969445b473dc25bdf2a09d63c044d5405" + } + }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "0b1876b35e98f560f9c9ca9797955f020238a092", - "internalRef": "296986742", - "log": "0b1876b35e98f560f9c9ca9797955f020238a092\nUse an older version of protoc-docs-plugin that is compatible with the specified gapic-generator and protobuf versions.\n\nprotoc-docs-plugin >=0.4.0 (see commit https://github.com/googleapis/protoc-docs-plugin/commit/979f03ede6678c487337f3d7e88bae58df5207af) is incompatible with protobuf 3.9.1.\n\nPiperOrigin-RevId: 296986742\n\n1e47e676cddbbd8d93f19ba0665af15b5532417e\nFix: Restore a method signature for UpdateCluster\n\nPiperOrigin-RevId: 296901854\n\n7f910bcc4fc4704947ccfd3ceed015d16b9e00c2\nUpdate Dataproc v1beta2 client.\n\nPiperOrigin-RevId: 296451205\n\nde287524405a3dce124d301634731584fc0432d7\nFix: Reinstate method signatures that had been missed off some RPCs\nFix: Correct resource types for two fields\n\nPiperOrigin-RevId: 296435091\n\ne5bc9566ae057fb4c92f8b7e047f1c8958235b53\nDeprecate the endpoint_uris field, as it is unused.\n\nPiperOrigin-RevId: 296357191\n\n8c12e2b4dca94e12bff9f538bdac29524ff7ef7a\nUpdate Dataproc v1 client.\n\nPiperOrigin-RevId: 296336662\n\n17567c4a1ef0a9b50faa87024d66f8acbb561089\nRemoving erroneous comment, a la https://github.com/googleapis/java-speech/pull/103\n\nPiperOrigin-RevId: 296332968\n\n3eaaaf8626ce5b0c0bc7eee05e143beffa373b01\nAdd BUILD.bazel for v1 secretmanager.googleapis.com\n\nPiperOrigin-RevId: 296274723\n\ne76149c3d992337f85eeb45643106aacae7ede82\nMove securitycenter v1 to use generate from annotations.\n\nPiperOrigin-RevId: 296266862\n\n203740c78ac69ee07c3bf6be7408048751f618f8\nAdd StackdriverLoggingConfig field to Cloud Tasks v2 API.\n\nPiperOrigin-RevId: 296256388\n\ne4117d5e9ed8bbca28da4a60a94947ca51cb2083\nCreate a Bazel BUILD file for the google.actions.type export.\n\nPiperOrigin-RevId: 296212567\n\na9639a0a9854fd6e1be08bba1ac3897f4f16cb2f\nAdd secretmanager.googleapis.com v1 protos\n\nPiperOrigin-RevId: 295983266\n\nce4f4c21d9dd2bfab18873a80449b9d9851efde8\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295861722\n\ncb61d6c2d070b589980c779b68ffca617f789116\nasset: v1p1beta1 remove SearchResources and SearchIamPolicies\n\nPiperOrigin-RevId: 295855449\n\nab2685d8d3a0e191dc8aef83df36773c07cb3d06\nfix: Dataproc v1 - AutoscalingPolicy annotation\n\nThis adds the second resource name pattern to the\nAutoscalingPolicy resource.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 295738415\n\n8a1020bf6828f6e3c84c3014f2c51cb62b739140\nUpdate cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295286165\n\n5cfa105206e77670369e4b2225597386aba32985\nAdd service control related proto build rule.\n\nPiperOrigin-RevId: 295262088\n\nee4dddf805072004ab19ac94df2ce669046eec26\nmonitoring v3: Add prefix \"https://cloud.google.com/\" into the link for global access\ncl 295167522, get ride of synth.py hacks\n\nPiperOrigin-RevId: 295238095\n\nd9835e922ea79eed8497db270d2f9f85099a519c\nUpdate some minor docs changes about user event proto\n\nPiperOrigin-RevId: 
295185610\n\n5f311e416e69c170243de722023b22f3df89ec1c\nfix: use correct PHP package name in gapic configuration\n\nPiperOrigin-RevId: 295161330\n\n6cdd74dcdb071694da6a6b5a206e3a320b62dd11\npubsub: v1 add client config annotations and retry config\n\nPiperOrigin-RevId: 295158776\n\n5169f46d9f792e2934d9fa25c36d0515b4fd0024\nAdded cloud asset api v1p4beta1.\n\nPiperOrigin-RevId: 295026522\n\n56b55aa8818cd0a532a7d779f6ef337ba809ccbd\nFix: Resource annotations for CreateTimeSeriesRequest and ListTimeSeriesRequest should refer to valid resources. TimeSeries is not a named resource.\n\nPiperOrigin-RevId: 294931650\n\n0646bc775203077226c2c34d3e4d50cc4ec53660\nRemove unnecessary languages from bigquery-related artman configuration files.\n\nPiperOrigin-RevId: 294809380\n\n8b78aa04382e3d4147112ad6d344666771bb1909\nUpdate backend.proto for schemes and protocol\n\nPiperOrigin-RevId: 294788800\n\n80b8f8b3de2359831295e24e5238641a38d8488f\nAdds artman config files for bigquerystorage endpoints v1beta2, v1alpha2, v1\n\nPiperOrigin-RevId: 294763931\n\n2c17ac33b226194041155bb5340c3f34733f1b3a\nAdd parameter to sample generated for UpdateInstance. Related to https://github.com/googleapis/python-redis/issues/4\n\nPiperOrigin-RevId: 294734008\n\nd5e8a8953f2acdfe96fb15e85eb2f33739623957\nMove bigquery datatransfer to gapic v2.\n\nPiperOrigin-RevId: 294703703\n\nefd36705972cfcd7d00ab4c6dfa1135bafacd4ae\nfix: Add two annotations that we missed.\n\nPiperOrigin-RevId: 294664231\n\n8a36b928873ff9c05b43859b9d4ea14cd205df57\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1beta2).\n\nPiperOrigin-RevId: 294459768\n\nc7a3caa2c40c49f034a3c11079dd90eb24987047\nFix: Define the \"bigquery.googleapis.com/Table\" resource in the BigQuery Storage API (v1).\n\nPiperOrigin-RevId: 294456889\n\n5006247aa157e59118833658084345ee59af7c09\nFix: Make deprecated fields optional\nFix: Deprecate SetLoggingServiceRequest.zone in line with the comments\nFeature: Add resource name method signatures where appropriate\n\nPiperOrigin-RevId: 294383128\n\neabba40dac05c5cbe0fca3a35761b17e372036c4\nFix: C# and PHP package/namespace capitalization for BigQuery Storage v1.\n\nPiperOrigin-RevId: 294382444\n\nf8d9a858a7a55eba8009a23aa3f5cc5fe5e88dde\nfix: artman configuration file for bigtable-admin\n\nPiperOrigin-RevId: 294322616\n\n0f29555d1cfcf96add5c0b16b089235afbe9b1a9\nAPI definition for (not-yet-launched) GCS gRPC.\n\nPiperOrigin-RevId: 294321472\n\nfcc86bee0e84dc11e9abbff8d7c3529c0626f390\nfix: Bigtable Admin v2\n\nChange LRO metadata from PartialUpdateInstanceMetadata\nto UpdateInstanceMetadata. 
(Otherwise, it will not build.)\n\nPiperOrigin-RevId: 294264582\n\n6d9361eae2ebb3f42d8c7ce5baf4bab966fee7c0\nrefactor: Add annotations to Bigtable Admin v2.\n\nPiperOrigin-RevId: 294243406\n\nad7616f3fc8e123451c8b3a7987bc91cea9e6913\nFix: Resource type in CreateLogMetricRequest should use logging.googleapis.com.\nFix: ListLogEntries should have a method signature for convenience of calling it.\n\nPiperOrigin-RevId: 294222165\n\n63796fcbb08712676069e20a3e455c9f7aa21026\nFix: Remove extraneous resource definition for cloudkms.googleapis.com/CryptoKey.\n\nPiperOrigin-RevId: 294176658\n\ne7d8a694f4559201e6913f6610069cb08b39274e\nDepend on the latest gapic-generator and resource names plugin.\n\nThis fixes the very old an very annoying bug: https://github.com/googleapis/gapic-generator/pull/3087\n\nPiperOrigin-RevId: 293903652\n\n806b2854a966d55374ee26bb0cef4e30eda17b58\nfix: correct capitalization of Ruby namespaces in SecurityCenter V1p1beta1\n\nPiperOrigin-RevId: 293903613\n\n1b83c92462b14d67a7644e2980f723112472e03a\nPublish annotations and grpc service config for Logging API.\n\nPiperOrigin-RevId: 293893514\n\ne46f761cd6ec15a9e3d5ed4ff321a4bcba8e8585\nGenerate the Bazel build file for recommendengine public api\n\nPiperOrigin-RevId: 293710856\n\n68477017c4173c98addac0373950c6aa9d7b375f\nMake `language_code` optional for UpdateIntentRequest and BatchUpdateIntentsRequest.\n\nThe comments and proto annotations describe this parameter as optional.\n\nPiperOrigin-RevId: 293703548\n\n16f823f578bca4e845a19b88bb9bc5870ea71ab2\nAdd BUILD.bazel files for managedidentities API\n\nPiperOrigin-RevId: 293698246\n\n2f53fd8178c9a9de4ad10fae8dd17a7ba36133f2\nAdd v1p1beta1 config file\n\nPiperOrigin-RevId: 293696729\n\n052b274138fce2be80f97b6dcb83ab343c7c8812\nAdd source field for user event and add field behavior annotations\n\nPiperOrigin-RevId: 293693115\n\n1e89732b2d69151b1b3418fff3d4cc0434f0dded\ndatacatalog: v1beta1 add three new RPCs to gapic v1beta1 config\n\nPiperOrigin-RevId: 293692823\n\n9c8bd09bbdc7c4160a44f1fbab279b73cd7a2337\nchange the name of AccessApproval service to AccessApprovalAdmin\n\nPiperOrigin-RevId: 293690934\n\n2e23b8fbc45f5d9e200572ca662fe1271bcd6760\nAdd ListEntryGroups method, add http bindings to support entry group tagging, and update some comments.\n\nPiperOrigin-RevId: 293666452\n\n0275e38a4ca03a13d3f47a9613aac8c8b0d3f1f2\nAdd proto_package field to managedidentities API. 
It is needed for APIs that still depend on artman generation.\n\nPiperOrigin-RevId: 293643323\n\n4cdfe8278cb6f308106580d70648001c9146e759\nRegenerating public protos for Data Catalog to add new Custom Type Entry feature.\n\nPiperOrigin-RevId: 293614782\n\n45d2a569ab526a1fad3720f95eefb1c7330eaada\nEnable client generation for v1 ManagedIdentities API.\n\nPiperOrigin-RevId: 293515675\n\n2c17086b77e6f3bcf04a1f65758dfb0c3da1568f\nAdd the Actions on Google common types (//google/actions/type/*).\n\nPiperOrigin-RevId: 293478245\n\n781aadb932e64a12fb6ead7cd842698d99588433\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293443396\n\ne2602608c9138c2fca24162720e67f9307c30b95\nDialogflow weekly v2/v2beta1 library update:\n- Documentation updates\nImportant updates are also posted at\nhttps://cloud.google.com/dialogflow/docs/release-notes\n\nPiperOrigin-RevId: 293442964\n\nc8aef82028d06b7992278fa9294c18570dc86c3d\nAdd cc_proto_library and cc_grpc_library targets for Bigtable protos.\n\nAlso fix indentation of cc_grpc_library targets in Spanner and IAM protos.\n\nPiperOrigin-RevId: 293440538\n\ne2faab04f4cb7f9755072330866689b1943a16e9\ncloudtasks: v2 replace non-standard retry params in gapic config v2\n\nPiperOrigin-RevId: 293424055\n\ndfb4097ea628a8470292c6590a4313aee0c675bd\nerrorreporting: v1beta1 add legacy artman config for php\n\nPiperOrigin-RevId: 293423790\n\nb18aed55b45bfe5b62476292c72759e6c3e573c6\nasset: v1p1beta1 updated comment for `page_size` limit.\n\nPiperOrigin-RevId: 293421386\n\nc9ef36b7956d9859a2fc86ad35fcaa16958ab44f\nbazel: Refactor CI build scripts\n\nPiperOrigin-RevId: 293387911\n\na8ed9d921fdddc61d8467bfd7c1668f0ad90435c\nfix: set Ruby module name for OrgPolicy\n\nPiperOrigin-RevId: 293257997\n\n6c7d28509bd8315de8af0889688ee20099594269\nredis: v1beta1 add UpgradeInstance and connect_mode field to Instance\n\nPiperOrigin-RevId: 293242878\n\nae0abed4fcb4c21f5cb67a82349a049524c4ef68\nredis: v1 add connect_mode field to Instance\n\nPiperOrigin-RevId: 293241914\n\n3f7a0d29b28ee9365771da2b66edf7fa2b4e9c56\nAdds service config definition for bigqueryreservation v1beta1\n\nPiperOrigin-RevId: 293234418\n\n0c88168d5ed6fe353a8cf8cbdc6bf084f6bb66a5\naddition of BUILD & configuration for accessapproval v1\n\nPiperOrigin-RevId: 293219198\n\n39bedc2e30f4778ce81193f6ba1fec56107bcfc4\naccessapproval: v1 publish protos\n\nPiperOrigin-RevId: 293167048\n\n69d9945330a5721cd679f17331a78850e2618226\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080182\n\nf6a1a6b417f39694275ca286110bc3c1ca4db0dc\nAdd file-level `Session` resource definition\n\nPiperOrigin-RevId: 293080178\n\n29d40b78e3dc1579b0b209463fbcb76e5767f72a\nExpose managedidentities/v1beta1/ API for client library usage.\n\nPiperOrigin-RevId: 292979741\n\na22129a1fb6e18056d576dfb7717aef74b63734a\nExpose managedidentities/v1/ API for client library usage.\n\nPiperOrigin-RevId: 292968186\n\nb5cbe4a4ba64ab19e6627573ff52057a1657773d\nSecurityCenter v1p1beta1: move file-level option on top to workaround protobuf.js bug.\n\nPiperOrigin-RevId: 292647187\n\nb224b317bf20c6a4fbc5030b4a969c3147f27ad3\nAdds API definitions for bigqueryreservation v1beta1.\n\nPiperOrigin-RevId: 292634722\n\nc1468702f9b17e20dd59007c0804a089b83197d2\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 292626173\n\nffdfa4f55ab2f0afc11d0eb68f125ccbd5e404bd\nvision: v1p3beta1 publish annotations and retry 
config\n\nPiperOrigin-RevId: 292605599\n\n78f61482cd028fc1d9892aa5d89d768666a954cd\nvision: v1p1beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292605125\n\n60bb5a294a604fd1778c7ec87b265d13a7106171\nvision: v1p2beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604980\n\n3bcf7aa79d45eb9ec29ab9036e9359ea325a7fc3\nvision: v1p4beta1 publish annotations and retry config\n\nPiperOrigin-RevId: 292604656\n\n2717b8a1c762b26911b45ecc2e4ee01d98401b28\nFix dataproc artman client library generation.\n\nPiperOrigin-RevId: 292555664\n\n7ac66d9be8a7d7de4f13566d8663978c9ee9dcd7\nAdd Dataproc Autoscaling API to V1.\n\nPiperOrigin-RevId: 292450564\n\n5d932b2c1be3a6ef487d094e3cf5c0673d0241dd\n- Improve documentation\n- Add a client_id field to StreamingPullRequest\n\nPiperOrigin-RevId: 292434036\n\neaff9fa8edec3e914995ce832b087039c5417ea7\nmonitoring: v3 publish annotations and client retry config\n\nPiperOrigin-RevId: 292425288\n\n70958bab8c5353870d31a23fb2c40305b050d3fe\nBigQuery Storage Read API v1 clients.\n\nPiperOrigin-RevId: 292407644\n\n7a15e7fe78ff4b6d5c9606a3264559e5bde341d1\nUpdate backend proto for Google Cloud Endpoints\n\nPiperOrigin-RevId: 292391607\n\n3ca2c014e24eb5111c8e7248b1e1eb833977c83d\nbazel: Add --flaky_test_attempts=3 argument to prevent CI failures caused by flaky tests\n\nPiperOrigin-RevId: 292382559\n\n9933347c1f677e81e19a844c2ef95bfceaf694fe\nbazel:Integrate latest protoc-java-resource-names-plugin changes (fix for PyYAML dependency in bazel rules)\n\nPiperOrigin-RevId: 292376626\n\nb835ab9d2f62c88561392aa26074c0b849fb0bd3\nasset: v1p2beta1 add client config annotations\n\n* remove unintentionally exposed RPCs\n* remove messages relevant to removed RPCs\n\nPiperOrigin-RevId: 292369593\n\nc1246a29e22b0f98e800a536b5b0da2d933a55f2\nUpdating v1 protos with the latest inline documentation (in comments) and config options. 
Also adding a per-service .yaml file.\n\nPiperOrigin-RevId: 292310790\n\nb491d07cadaae7cde5608321f913e5ca1459b32d\nRevert accidental local_repository change\n\nPiperOrigin-RevId: 292245373\n\naf3400a8cb6110025198b59a0f7d018ae3cda700\nUpdate gapic-generator dependency (prebuilt PHP binary support).\n\nPiperOrigin-RevId: 292243997\n\n341fd5690fae36f36cf626ef048fbcf4bbe7cee6\ngrafeas: v1 add resource_definition for the grafeas.io/Project and change references for Project.\n\nPiperOrigin-RevId: 292221998\n\n42e915ec2ece1cd37a590fbcd10aa2c0fb0e5b06\nUpdate the gapic-generator, protoc-java-resource-name-plugin and protoc-docs-plugin to the latest commit.\n\nPiperOrigin-RevId: 292182368\n\nf035f47250675d31492a09f4a7586cfa395520a7\nFix grafeas build and update build.sh script to include gerafeas.\n\nPiperOrigin-RevId: 292168753\n\n26ccb214b7bc4a716032a6266bcb0a9ca55d6dbb\nasset: v1p1beta1 add client config annotations and retry config\n\nPiperOrigin-RevId: 292154210\n\n974ee5c0b5d03e81a50dafcedf41e0efebb5b749\nasset: v1beta1 add client config annotations\n\nPiperOrigin-RevId: 292152573\n\ncf3b61102ed5f36b827bc82ec39be09525f018c8\n Fix to protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 292034635\n\n4e1cfaa7c0fede9e65d64213ca3da1b1255816c0\nUpdate the public proto to support UTF-8 encoded id for CatalogService API, increase the ListCatalogItems deadline to 300s and some minor documentation change\n\nPiperOrigin-RevId: 292030970\n\n9c483584f8fd5a1b862ae07973f4cc7bb3e46648\nasset: add annotations to v1p1beta1\n\nPiperOrigin-RevId: 292009868\n\ne19209fac29731d0baf6d9ac23da1164f7bdca24\nAdd the google.rpc.context.AttributeContext message to the open source\ndirectories.\n\nPiperOrigin-RevId: 291999930\n\nae5662960573f279502bf98a108a35ba1175e782\noslogin API: move file level option on top of the file to avoid protobuf.js bug.\n\nPiperOrigin-RevId: 291990506\n\neba3897fff7c49ed85d3c47fc96fe96e47f6f684\nAdd cc_proto_library and cc_grpc_library targets for Spanner and IAM protos.\n\nPiperOrigin-RevId: 291988651\n\n8e981acfd9b97ea2f312f11bbaa7b6c16e412dea\nBeta launch for PersonDetection and FaceDetection features.\n\nPiperOrigin-RevId: 291821782\n\n994e067fae3b21e195f7da932b08fff806d70b5d\nasset: add annotations to v1p2beta1\n\nPiperOrigin-RevId: 291815259\n\n244e1d2c89346ca2e0701b39e65552330d68545a\nAdd Playable Locations service\n\nPiperOrigin-RevId: 291806349\n\n909f8f67963daf45dd88d020877fb9029b76788d\nasset: add annotations to v1beta2\n\nPiperOrigin-RevId: 291805301\n\n3c39a1d6e23c1ef63c7fba4019c25e76c40dfe19\nKMS: add file-level message for CryptoKeyPath, it is defined in gapic yaml but not\nin proto files.\n\nPiperOrigin-RevId: 291420695\n\nc6f3f350b8387f8d1b85ed4506f30187ebaaddc3\ncontaineranalysis: update v1beta1 and bazel build with annotations\n\nPiperOrigin-RevId: 291401900\n\n92887d74b44e4e636252b7b8477d0d2570cd82db\nfix: fix the location of grpc config file.\n\nPiperOrigin-RevId: 291396015\n\ne26cab8afd19d396b929039dac5d874cf0b5336c\nexpr: add default_host and method_signature annotations to CelService\n\nPiperOrigin-RevId: 291240093\n\n06093ae3952441c34ec176d1f7431b8765cec0be\nirm: fix v1alpha2 bazel build by adding missing proto imports\n\nPiperOrigin-RevId: 291227940\n\na8a2514af326e4673063f9a3c9d0ef1091c87e6c\nAdd proto annotation for cloud/irm API\n\nPiperOrigin-RevId: 291217859\n\n8d16f76de065f530d395a4c7eabbf766d6a120fd\nGenerate Memcache v1beta2 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 
291008516\n\n3af1dabd93df9a9f17bf3624d3b875c11235360b\ngrafeas: Add containeranalysis default_host to Grafeas service\n\nPiperOrigin-RevId: 290965849\n\nbe2663fa95e31cba67d0cd62611a6674db9f74b7\nfix(google/maps/roads): add missing opening bracket\n\nPiperOrigin-RevId: 290964086\n\nfacc26550a0af0696e0534bc9cae9df14275aa7c\nUpdating v2 protos with the latest inline documentation (in comments) and adding a per-service .yaml file.\n\nPiperOrigin-RevId: 290952261\n\ncda99c1f7dc5e4ca9b1caeae1dc330838cbc1461\nChange api_name to 'asset' for v1p1beta1\n\nPiperOrigin-RevId: 290800639\n\n94e9e90c303a820ce40643d9129e7f0d2054e8a1\nAdds Google Maps Road service\n\nPiperOrigin-RevId: 290795667\n\na3b23dcb2eaecce98c600c7d009451bdec52dbda\nrpc: new message ErrorInfo, other comment updates\n\nPiperOrigin-RevId: 290781668\n\n26420ef4e46c37f193c0fbe53d6ebac481de460e\nAdd proto definition for Org Policy v1.\n\nPiperOrigin-RevId: 290771923\n\n7f0dab8177cf371ae019a082e2512de7ac102888\nPublish Routes Preferred API v1 service definitions.\n\nPiperOrigin-RevId: 290326986\n\nad6e508d0728e1d1bca6e3f328cd562718cb772d\nFix: Qualify resource type references with \"jobs.googleapis.com/\"\n\nPiperOrigin-RevId: 290285762\n\n58e770d568a2b78168ddc19a874178fee8265a9d\ncts client library\n\nPiperOrigin-RevId: 290146169\n\naf9daa4c3b4c4a8b7133b81588dd9ffd37270af2\nAdd more programming language options to public proto\n\nPiperOrigin-RevId: 290144091\n\nd9f2bbf2df301ef84641d4cec7c828736a0bd907\ntalent: add missing resource.proto dep to Bazel build target\n\nPiperOrigin-RevId: 290143164\n\n3b3968237451d027b42471cd28884a5a1faed6c7\nAnnotate Talent API.\nAdd gRPC service config for retry.\nUpdate bazel file with google.api.resource dependency.\n\nPiperOrigin-RevId: 290125172\n\n0735b4b096872960568d1f366bfa75b7b0e1f1a3\nWeekly library update.\n\nPiperOrigin-RevId: 289939042\n\n8760d3d9a4543d7f9c0d1c7870aca08b116e4095\nWeekly library update.\n\nPiperOrigin-RevId: 289939020\n\n8607df842f782a901805187e02fff598145b0b0e\nChange Talent API timeout to 30s.\n\nPiperOrigin-RevId: 289912621\n\n908155991fe32570653bcb72ecfdcfc896642f41\nAdd Recommendations AI V1Beta1\n\nPiperOrigin-RevId: 289901914\n\n5c9a8c2bebd8b71aa66d1cc473edfaac837a2c78\nAdding no-arg method signatures for ListBillingAccounts and ListServices\n\nPiperOrigin-RevId: 289891136\n\n50b0e8286ac988b0593bd890eb31fef6ea2f5767\nlongrunning: add grpc service config and default_host annotation to operations.proto\n\nPiperOrigin-RevId: 289876944\n\n6cac27dabe51c54807b0401698c32d34998948a9\n Updating default deadline for Cloud Security Command Center's v1 APIs.\n\nPiperOrigin-RevId: 289875412\n\nd99df0d67057a233c711187e0689baa4f8e6333d\nFix: Correct spelling in C# namespace option\n\nPiperOrigin-RevId: 289709813\n\n2fa8d48165cc48e35b0c62e6f7bdade12229326c\nfeat: Publish Recommender v1 to GitHub.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289619243\n\n9118db63d1ab493a2e44a3b4973fde810a835c49\nfirestore: don't retry reads that fail with Aborted\n\nFor transaction reads that fail with ABORTED, we need to rollback and start a new transaction. Our current configuration makes it so that GAPIC retries ABORTED reads multiple times without making any progress. 
Instead, we should retry at the transaction level.\n\nPiperOrigin-RevId: 289532382\n\n1dbfd3fe4330790b1e99c0bb20beb692f1e20b8a\nFix bazel build\nAdd other langauges (Java was already there) for bigquery/storage/v1alpha2 api.\n\nPiperOrigin-RevId: 289519766\n\nc06599cdd7d11f8d3fd25f8d3249e5bb1a3d5d73\nInitial commit of google.cloud.policytroubleshooter API, The API helps in troubleshooting GCP policies. Refer https://cloud.google.com/iam/docs/troubleshooting-access for more information\n\nPiperOrigin-RevId: 289491444\n\nfce7d80fa16ea241e87f7bc33d68595422e94ecd\nDo not pass samples option for Artman config of recommender v1 API.\n\nPiperOrigin-RevId: 289477403\n\nef179e8c61436297e6bb124352e47e45c8c80cb1\nfix: Address missing Bazel dependency.\n\nBazel builds stopped working in 06ec6d5 because\nthe google/longrunning/operations.proto file took\nan import from google/api/client.proto, but that\nimport was not added to BUILD.bazel.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446074\n\n8841655b242c84fd691d77d7bcf21b61044f01ff\nMigrate Data Labeling v1beta1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289446026\n\n06ec6d5d053fff299eaa6eaa38afdd36c5e2fc68\nAdd annotations to google.longrunning.v1\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289413169\n\n0480cf40be1d3cc231f4268a2fdb36a8dd60e641\nMigrate IAM Admin v1 to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289411084\n\n1017173e9adeb858587639af61889ad970c728b1\nSpecify a C# namespace for BigQuery Connection v1beta1\n\nPiperOrigin-RevId: 289396763\n\nb08714b378e8e5b0c4ecdde73f92c36d6303b4b6\nfix: Integrate latest proto-docs-plugin fix.\nFixes dialogflow v2\n\nPiperOrigin-RevId: 289189004\n\n51217a67e79255ee1f2e70a6a3919df082513327\nCreate BUILD file for recommender v1\n\nPiperOrigin-RevId: 289183234\n\nacacd87263c0a60e458561b8b8ce9f67c760552a\nGenerate recommender v1 API protos and gRPC ServiceConfig files\n\nPiperOrigin-RevId: 289177510\n\n9d2f7133b97720b1fa3601f6dcd30760ba6d8a1e\nFix kokoro build script\n\nPiperOrigin-RevId: 289166315\n\nc43a67530d2a47a0220cad20ca8de39b3fbaf2c5\ncloudtasks: replace missing RPC timeout config for v2beta2 and v2beta3\n\nPiperOrigin-RevId: 289162391\n\n4cefc229a9197236fc0adf02d69b71c0c5cf59de\nSynchronize new proto/yaml changes.\n\nPiperOrigin-RevId: 289158456\n\n56f263fe959c50786dab42e3c61402d32d1417bd\nCatalog API: Adding config necessary to build client libraries\n\nPiperOrigin-RevId: 289149879\n\n4543762b23a57fc3c53d409efc3a9affd47b6ab3\nFix Bazel build\nbilling/v1 and dialogflow/v2 remain broken (not bazel-related issues).\nBilling has wrong configuration, dialogflow failure is caused by a bug in documentation plugin.\n\nPiperOrigin-RevId: 289140194\n\nc9dce519127b97e866ca133a01157f4ce27dcceb\nUpdate Bigtable docs\n\nPiperOrigin-RevId: 289114419\n\n802c5c5f2bf94c3facb011267d04e71942e0d09f\nMigrate DLP to proto annotations (but not GAPIC v2).\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 289102579\n\n6357f30f2ec3cff1d8239d18b707ff9d438ea5da\nRemove gRPC configuration file that was in the wrong place.\n\nPiperOrigin-RevId: 289096111\n\n360a8792ed62f944109d7e22d613a04a010665b4\n Protos for v1p1beta1 release of Cloud Security Command Center\n\nPiperOrigin-RevId: 289011995\n\na79211c20c4f2807eec524d00123bf7c06ad3d6e\nRoll back containeranalysis v1 to GAPIC v1.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288999068\n\n9e60345ba603e03484a8aaa33ce5ffa19c1c652b\nPublish Routes Preferred API v1 proto definitions.\n\nPiperOrigin-RevId: 
288941399\n\nd52885b642ad2aa1f42b132ee62dbf49a73e1e24\nMigrate the service management API to GAPIC v2.\n\nCommitter: @lukesneeringer\nPiperOrigin-RevId: 288909426\n\n6ace586805c08896fef43e28a261337fcf3f022b\ncloudtasks: replace missing RPC timeout config\n\nPiperOrigin-RevId: 288783603\n\n51d906cabee4876b12497054b15b05d4a50ad027\nImport of Grafeas from Github.\n\nUpdate BUILD.bazel accordingly.\n\nPiperOrigin-RevId: 288783426\n\n" + "sha": "184661793fbe3b89f2b485c303e7466cef9d21a1", + "internalRef": "316182409" } }, { - "template": { - "name": "python_library", - "origin": "synthtool.gcp", - "version": "2020.2.4" + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "2a29860e484695a07aa8670f9e750bfeeb308a43" + } + }, + { + "git": { + "name": "synthtool", + "remote": "https://github.com/googleapis/synthtool.git", + "sha": "2a29860e484695a07aa8670f9e750bfeeb308a43" } } ], @@ -22,7 +34,7 @@ { "client": { "source": "googleapis", - "apiName": "monitoring-dashboard", + "apiName": "monitoring_dashboard", "apiVersion": "v1", "language": "python", "generator": "bazel" diff --git a/synth.py b/synth.py index 511219d..d452233 100644 --- a/synth.py +++ b/synth.py @@ -77,9 +77,8 @@ templated_files = common.py_library(cov_level=79) s.move(templated_files) -# No local dependencies in a split repo -# Manually remove from noxfile until the template is updated -s.replace("noxfile.py", "LOCAL_DEPS = .*", "LOCAL_DEPS = []") +# TODO(busunkim): Use latest sphinx after microgenerator transition +s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"') s.shell.run(["nox", "-s", "blacken"], hide_output=False) diff --git a/testing/.gitignore b/testing/.gitignore new file mode 100644 index 0000000..b05fbd6 --- /dev/null +++ b/testing/.gitignore @@ -0,0 +1,3 @@ +test-env.sh +service-account.json +client-secrets.json \ No newline at end of file diff --git a/tests/unit/gapic/v1/test_dashboards_service_client_v1.py b/tests/unit/gapic/v1/test_dashboards_service_client_v1.py index a60170d..1e0f376 100644 --- a/tests/unit/gapic/v1/test_dashboards_service_client_v1.py +++ b/tests/unit/gapic/v1/test_dashboards_service_client_v1.py @@ -127,7 +127,7 @@ def test_list_dashboards(self): client = v1.DashboardsServiceClient() # Setup Request - parent = "parent-995424086" + parent = client.project_path("[PROJECT]") paged_list_response = client.list_dashboards(parent) resources = list(paged_list_response) @@ -148,7 +148,7 @@ def test_list_dashboards_exception(self): client = v1.DashboardsServiceClient() # Setup request - parent = "parent-995424086" + parent = client.project_path("[PROJECT]") paged_list_response = client.list_dashboards(parent) with pytest.raises(CustomException): @@ -170,7 +170,7 @@ def test_get_dashboard(self): client = v1.DashboardsServiceClient() # Setup Request - name = "name3373707" + name = client.dashboard_path("[PROJECT]", "[DASHBOARD]") response = client.get_dashboard(name) assert expected_response == response @@ -189,7 +189,7 @@ def test_get_dashboard_exception(self): client = v1.DashboardsServiceClient() # Setup request - name = "name3373707" + name = client.dashboard_path("[PROJECT]", "[DASHBOARD]") with pytest.raises(CustomException): client.get_dashboard(name) @@ -202,7 +202,7 @@ def test_delete_dashboard(self): client = v1.DashboardsServiceClient() # Setup Request - name = "name3373707" + name = client.dashboard_path("[PROJECT]", "[DASHBOARD]") client.delete_dashboard(name) @@ -220,7 +220,7 @@ def test_delete_dashboard_exception(self): client = 
v1.DashboardsServiceClient() # Setup request - name = "name3373707" + name = client.dashboard_path("[PROJECT]", "[DASHBOARD]") with pytest.raises(CustomException): client.delete_dashboard(name)
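The unit-test updates above replace hard-coded resource strings with the client's generated path helpers. A short sketch of what those helpers expand to, assuming the import style used by the test module and the standard path templates for this API:

    from google.cloud.monitoring_dashboard import v1

    # The helpers are classmethods, so no credentials are needed to build names.
    parent = v1.DashboardsServiceClient.project_path("my-project")
    name = v1.DashboardsServiceClient.dashboard_path("my-project", "my-dashboard")
    print(parent)  # expected: projects/my-project
    print(name)    # expected: projects/my-project/dashboards/my-dashboard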