diff --git a/README.rst b/README.rst
index e90acde..9331752 100644
--- a/README.rst
+++ b/README.rst
@@ -34,6 +34,16 @@ In order to use this library, you first need to go through the following steps:
 .. _Enable the Stackdriver Monitoring Dashboards API.: https://cloud.google.com/monitoring/dashboards/
 .. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html
 
+Supported Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Python >= 3.6
+
+Deprecated Python Versions
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Python == 2.7
+
+The last version of this library compatible with Python 2.7 is google-cloud-monitoring-dashboards==1.0.0.
+
 Installation
 ~~~~~~~~~~~~
diff --git a/UPGRADING.md b/UPGRADING.md
new file mode 100644
index 0000000..698c267
--- /dev/null
+++ b/UPGRADING.md
@@ -0,0 +1,121 @@
+# 2.0.0 Migration Guide
+
+The 2.0.0 release of the `google-cloud-monitoring-dashboards` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library will likely require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage.
+
+If you experience issues or have questions, please file an [issue](https://github.com/googleapis/python-monitoring-dashboards/issues).
+
+## Supported Python Versions
+
+> **WARNING**: Breaking change
+The 2.0.0 release requires Python 3.6+.
+
+## Create Service Client
+> **WARNING**: Breaking change
+The namespace for importing the service client has changed in the new release.
+
+
+**Before:**
+```py
+from google.cloud.monitoring_dashboard import v1
+client = v1.DashboardsServiceClient()
+```
+**After:**
+```py
+from google.cloud import monitoring_dashboard_v1
+client = monitoring_dashboard_v1.DashboardsServiceClient()
+```
+
+## Method Calls
+
+> **WARNING**: Breaking change
+Methods expect request objects. We provide a script that will convert most common use cases.
+* Install the library
+
+```sh
+python3 -m pip install google-cloud-monitoring-dashboards
+```
+
+* The script `fixup_dashboard_v1_keywords.py` is shipped with the library. It expects
+an input directory (with the code to convert) and an empty destination directory.
+
+```sh
+$ fixup_dashboard_v1_keywords.py --input-directory .samples/ --output-directory samples/
+```
+
+**Before:**
+```py
+# TODO: Initialize `parent`:
+parent = ''
+# TODO: Initialize `dashboard`:
+dashboard = {}
+response = client.create_dashboard(parent, dashboard)
+```
+
+**After:**
+```py
+response = client.create_dashboard(request={"parent": parent, "dashboard": dashboard})
+```
+
+### More Details
+
+In `google-cloud-monitoring-dashboards<2.0.0`, parameters required by the API were positional parameters and optional parameters were keyword parameters.
+
+**Before:**
+```py
+ def create_dashboard(
+     self,
+     parent,
+     dashboard,
+     retry=google.api_core.gapic_v1.method.DEFAULT,
+     timeout=google.api_core.gapic_v1.method.DEFAULT,
+     metadata=None,
+ ):
+```
+
+In the 2.0.0 release, all methods have a single positional parameter `request`. Method docstrings indicate whether a parameter is required or optional.
+
+Some methods have additional keyword-only parameters. The available parameters depend on the `google.api.method_signature` annotation specified by the API producer.
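+
+For illustration only (this sketch is not part of the generated guide), the `request` can also be constructed as a typed object rather than a plain dict. The `CreateDashboardRequest` name is assumed to be re-exported from `google.cloud.monitoring_dashboard_v1`, mirroring the exports added elsewhere in this change, and the resource values are placeholders. The updated method signature is shown under **After:** below.
+
+```py
+from google.cloud import monitoring_dashboard_v1
+
+client = monitoring_dashboard_v1.DashboardsServiceClient()
+
+# Build the request as a typed object; a dict with the same keys is equivalent.
+# Both the parent and the dashboard body below are placeholder values.
+request = monitoring_dashboard_v1.CreateDashboardRequest(
+    parent="projects/[PROJECT_ID_OR_NUMBER]",
+    dashboard={"display_name": "Example dashboard"},
+)
+response = client.create_dashboard(request=request)
+```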
+
+
+**After:**
+```py
+ def create_dashboard(
+     self,
+     request: dashboards_service.CreateDashboardRequest = None,
+     *,
+     retry: retries.Retry = gapic_v1.method.DEFAULT,
+     timeout: float = None,
+     metadata: Sequence[Tuple[str, str]] = (),
+ ) -> dashboard.Dashboard:
+```
+
+> **NOTE:** The `request` parameter and flattened keyword parameters for the API are mutually exclusive.
+> Passing both will result in an error.
+Both of these calls are valid:
+
+```py
+response = client.create_dashboard(
+    request={
+        "parent": parent,
+        "dashboard": dashboard,
+    }
+)
+```
+
+```py
+response = client.create_dashboard(
+    parent=parent,
+    dashboard=dashboard,
+)
+```
+
+This call is invalid because it mixes `request` with a keyword argument `dashboard`. Executing this code will result in an error.
+
+```py
+response = client.create_dashboard(
+    request={
+        "parent": parent,
+    },
+    dashboard=dashboard
+)
+```
\ No newline at end of file
diff --git a/docs/UPGRADING.md b/docs/UPGRADING.md
new file mode 120000
index 0000000..01097c8
--- /dev/null
+++ b/docs/UPGRADING.md
@@ -0,0 +1 @@
+../UPGRADING.md
\ No newline at end of file
diff --git a/docs/dashboard_v1/services.rst b/docs/dashboard_v1/services.rst
new file mode 100644
index 0000000..add1e89
--- /dev/null
+++ b/docs/dashboard_v1/services.rst
@@ -0,0 +1,6 @@
+Services for Google Monitoring Dashboard v1 API
+===============================================
+
+.. automodule:: google.cloud.monitoring_dashboard_v1.services.dashboards_service
+    :members:
+    :inherited-members:
diff --git a/docs/dashboard_v1/types.rst b/docs/dashboard_v1/types.rst
new file mode 100644
index 0000000..d9c96b5
--- /dev/null
+++ b/docs/dashboard_v1/types.rst
@@ -0,0 +1,5 @@
+Types for Google Monitoring Dashboard v1 API
+============================================
+
+.. automodule:: google.cloud.monitoring_dashboard_v1.types
+    :members:
diff --git a/docs/gapic/v1/api.rst b/docs/gapic/v1/api.rst
deleted file mode 100644
index 180c1c8..0000000
--- a/docs/gapic/v1/api.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Client for Cloud Monitoring API
-===============================
-
-.. automodule:: google.cloud.monitoring_dashboard.v1
-    :members:
-    :inherited-members:
\ No newline at end of file
diff --git a/docs/gapic/v1/types.rst b/docs/gapic/v1/types.rst
deleted file mode 100644
index 97bef83..0000000
--- a/docs/gapic/v1/types.rst
+++ /dev/null
@@ -1,5 +0,0 @@
-Types for Cloud Monitoring API Client
-=====================================
-
-.. automodule:: google.cloud.monitoring_dashboard.v1.types
-    :members:
\ No newline at end of file
diff --git a/docs/index.rst b/docs/index.rst
index 18af716..bd84b9b 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -7,5 +7,15 @@ Api Reference
 .. toctree::
     :maxdepth: 2
 
-    gapic/v1/api
-    gapic/v1/types
\ No newline at end of file
+    dashboard_v1/services
+    dashboard_v1/types
+
+Migration Guide
+---------------
+
+See the guide below for instructions on migrating to the 2.x release of this library.
+
+.. toctree::
+    :maxdepth: 2
+
+    UPGRADING
diff --git a/google/__init__.py b/google/__init__.py
deleted file mode 100644
index 9a1b64a..0000000
--- a/google/__init__.py
+++ /dev/null
@@ -1,24 +0,0 @@
-# -*- coding: utf-8 -*-
-#
-# Copyright 2020 Google LLC
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/google/cloud/__init__.py b/google/cloud/__init__.py deleted file mode 100644 index 9a1b64a..0000000 --- a/google/cloud/__init__.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -try: - import pkg_resources - - pkg_resources.declare_namespace(__name__) -except ImportError: - import pkgutil - - __path__ = pkgutil.extend_path(__path__, __name__) diff --git a/google/cloud/monitoring_dashboard/__init__.py b/google/cloud/monitoring_dashboard/__init__.py index e69de29..870fc49 100644 --- a/google/cloud/monitoring_dashboard/__init__.py +++ b/google/cloud/monitoring_dashboard/__init__.py @@ -0,0 +1,88 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.cloud.monitoring_dashboard_v1.services.dashboards_service.async_client import ( + DashboardsServiceAsyncClient, +) +from google.cloud.monitoring_dashboard_v1.services.dashboards_service.client import ( + DashboardsServiceClient, +) +from google.cloud.monitoring_dashboard_v1.types.common import Aggregation +from google.cloud.monitoring_dashboard_v1.types.common import PickTimeSeriesFilter +from google.cloud.monitoring_dashboard_v1.types.common import ( + StatisticalTimeSeriesFilter, +) +from google.cloud.monitoring_dashboard_v1.types.dashboard import Dashboard +from google.cloud.monitoring_dashboard_v1.types.dashboards_service import ( + CreateDashboardRequest, +) +from google.cloud.monitoring_dashboard_v1.types.dashboards_service import ( + DeleteDashboardRequest, +) +from google.cloud.monitoring_dashboard_v1.types.dashboards_service import ( + GetDashboardRequest, +) +from google.cloud.monitoring_dashboard_v1.types.dashboards_service import ( + ListDashboardsRequest, +) +from google.cloud.monitoring_dashboard_v1.types.dashboards_service import ( + ListDashboardsResponse, +) +from google.cloud.monitoring_dashboard_v1.types.dashboards_service import ( + UpdateDashboardRequest, +) +from google.cloud.monitoring_dashboard_v1.types.layouts import ColumnLayout +from google.cloud.monitoring_dashboard_v1.types.layouts import GridLayout +from google.cloud.monitoring_dashboard_v1.types.layouts import RowLayout +from google.cloud.monitoring_dashboard_v1.types.metrics import SparkChartType +from google.cloud.monitoring_dashboard_v1.types.metrics import Threshold +from google.cloud.monitoring_dashboard_v1.types.metrics import TimeSeriesFilter +from google.cloud.monitoring_dashboard_v1.types.metrics import TimeSeriesFilterRatio +from google.cloud.monitoring_dashboard_v1.types.metrics import TimeSeriesQuery +from google.cloud.monitoring_dashboard_v1.types.scorecard import Scorecard +from google.cloud.monitoring_dashboard_v1.types.text import Text +from google.cloud.monitoring_dashboard_v1.types.widget import Widget +from google.cloud.monitoring_dashboard_v1.types.xychart import ChartOptions +from google.cloud.monitoring_dashboard_v1.types.xychart import XyChart + +__all__ = ( + "Aggregation", + "ChartOptions", + "ColumnLayout", + "CreateDashboardRequest", + "Dashboard", + "DashboardsServiceAsyncClient", + "DashboardsServiceClient", + "DeleteDashboardRequest", + "GetDashboardRequest", + "GridLayout", + "ListDashboardsRequest", + "ListDashboardsResponse", + "PickTimeSeriesFilter", + "RowLayout", + "Scorecard", + "SparkChartType", + "StatisticalTimeSeriesFilter", + "Text", + "Threshold", + "TimeSeriesFilter", + "TimeSeriesFilterRatio", + "TimeSeriesQuery", + "UpdateDashboardRequest", + "Widget", + "XyChart", +) diff --git a/google/cloud/monitoring_dashboard/py.typed b/google/cloud/monitoring_dashboard/py.typed new file mode 100644 index 0000000..a52708e --- /dev/null +++ b/google/cloud/monitoring_dashboard/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-monitoring-dashboard package uses inline types. diff --git a/google/cloud/monitoring_dashboard/v1.py b/google/cloud/monitoring_dashboard/v1.py deleted file mode 100644 index 75e49ff..0000000 --- a/google/cloud/monitoring_dashboard/v1.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import - -from google.cloud.monitoring_dashboard.v1 import DashboardsServiceClient -from google.cloud.monitoring_dashboard.v1 import enums -from google.cloud.monitoring_dashboard.v1 import types - - -__all__ = ( - "enums", - "types", - "DashboardsServiceClient", -) diff --git a/google/cloud/monitoring_dashboard/v1/__init__.py b/google/cloud/monitoring_dashboard/v1/__init__.py deleted file mode 100644 index ad3d901..0000000 --- a/google/cloud/monitoring_dashboard/v1/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -from __future__ import absolute_import -import sys -import warnings - -from google.cloud.monitoring_dashboard.v1 import types -from google.cloud.monitoring_dashboard.v1.gapic import dashboards_service_client -from google.cloud.monitoring_dashboard.v1.gapic import enums - - -if sys.version_info[:2] == (2, 7): - message = ( - "A future version of this library will drop support for Python 2.7. " - "More details about Python 2 support for Google Cloud Client Libraries " - "can be found at https://cloud.google.com/python/docs/python2-sunset/" - ) - warnings.warn(message, DeprecationWarning) - - -class DashboardsServiceClient(dashboards_service_client.DashboardsServiceClient): - __doc__ = dashboards_service_client.DashboardsServiceClient.__doc__ - enums = enums - - -__all__ = ( - "enums", - "types", - "DashboardsServiceClient", -) diff --git a/google/cloud/monitoring_dashboard/v1/gapic/__init__.py b/google/cloud/monitoring_dashboard/v1/gapic/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/google/cloud/monitoring_dashboard/v1/gapic/dashboards_service_client.py b/google/cloud/monitoring_dashboard/v1/gapic/dashboards_service_client.py deleted file mode 100644 index 38e05d3..0000000 --- a/google/cloud/monitoring_dashboard/v1/gapic/dashboards_service_client.py +++ /dev/null @@ -1,640 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Accesses the google.monitoring.dashboard.v1 DashboardsService API.""" - -import functools -import pkg_resources -import warnings - -from google.oauth2 import service_account -import google.api_core.client_options -import google.api_core.gapic_v1.client_info -import google.api_core.gapic_v1.config -import google.api_core.gapic_v1.method -import google.api_core.gapic_v1.routing_header -import google.api_core.grpc_helpers -import google.api_core.page_iterator -import google.api_core.path_template -import grpc - -from google.cloud.monitoring_dashboard.v1.gapic import dashboards_service_client_config -from google.cloud.monitoring_dashboard.v1.gapic import enums -from google.cloud.monitoring_dashboard.v1.gapic.transports import ( - dashboards_service_grpc_transport, -) -from google.cloud.monitoring_dashboard.v1.proto import dashboard_pb2 -from google.cloud.monitoring_dashboard.v1.proto import dashboards_service_pb2 -from google.cloud.monitoring_dashboard.v1.proto import dashboards_service_pb2_grpc -from google.protobuf import empty_pb2 - - -_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution( - "google-cloud-monitoring-dashboards", -).version - - -class DashboardsServiceClient(object): - """ - Manages Stackdriver dashboards. A dashboard is an arrangement of data display - widgets in a specific layout. - """ - - SERVICE_ADDRESS = "monitoring.googleapis.com:443" - """The default address of the service.""" - - # The name of the interface for this client. This is the key used to - # find the method configuration in the client_config dictionary. - _INTERFACE_NAME = "google.monitoring.dashboard.v1.DashboardsService" - - @classmethod - def from_service_account_file(cls, filename, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - DashboardsServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @classmethod - def dashboard_path(cls, project, dashboard): - """Return a fully-qualified dashboard string.""" - return google.api_core.path_template.expand( - "projects/{project}/dashboards/{dashboard}", - project=project, - dashboard=dashboard, - ) - - @classmethod - def project_path(cls, project): - """Return a fully-qualified project string.""" - return google.api_core.path_template.expand( - "projects/{project}", project=project, - ) - - def __init__( - self, - transport=None, - channel=None, - credentials=None, - client_config=None, - client_info=None, - client_options=None, - ): - """Constructor. - - Args: - transport (Union[~.DashboardsServiceGrpcTransport, - Callable[[~.Credentials, type], ~.DashboardsServiceGrpcTransport]): A transport - instance, responsible for actually making the API calls. - The default transport uses the gRPC protocol. - This argument may also be a callable which returns a - transport instance. Callables will be sent the credentials - as the first argument and the default transport class as - the second argument. - channel (grpc.Channel): DEPRECATED. A ``Channel`` instance - through which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. 
- credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is mutually exclusive with providing a - transport instance to ``transport``; doing so will raise - an exception. - client_config (dict): DEPRECATED. A dictionary of call options for - each method. If not specified, the default configuration is used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - client_options (Union[dict, google.api_core.client_options.ClientOptions]): - Client options used to set user options on the client. API Endpoint - should be set through client_options. - """ - # Raise deprecation warnings for things we want to go away. - if client_config is not None: - warnings.warn( - "The `client_config` argument is deprecated.", - PendingDeprecationWarning, - stacklevel=2, - ) - else: - client_config = dashboards_service_client_config.config - - if channel: - warnings.warn( - "The `channel` argument is deprecated; use " "`transport` instead.", - PendingDeprecationWarning, - stacklevel=2, - ) - - api_endpoint = self.SERVICE_ADDRESS - if client_options: - if type(client_options) == dict: - client_options = google.api_core.client_options.from_dict( - client_options - ) - if client_options.api_endpoint: - api_endpoint = client_options.api_endpoint - - # Instantiate the transport. - # The transport is responsible for handling serialization and - # deserialization and actually sending data to the service. - if transport: - if callable(transport): - self.transport = transport( - credentials=credentials, - default_class=dashboards_service_grpc_transport.DashboardsServiceGrpcTransport, - address=api_endpoint, - ) - else: - if credentials: - raise ValueError( - "Received both a transport instance and " - "credentials; these are mutually exclusive." - ) - self.transport = transport - else: - self.transport = dashboards_service_grpc_transport.DashboardsServiceGrpcTransport( - address=api_endpoint, channel=channel, credentials=credentials, - ) - - if client_info is None: - client_info = google.api_core.gapic_v1.client_info.ClientInfo( - gapic_version=_GAPIC_LIBRARY_VERSION, - ) - else: - client_info.gapic_version = _GAPIC_LIBRARY_VERSION - self._client_info = client_info - - # Parse out the default settings for retry and timeout for each RPC - # from the client configuration. - # (Ordinarily, these are the defaults specified in the `*_config.py` - # file next to this one.) - self._method_configs = google.api_core.gapic_v1.config.parse_method_configs( - client_config["interfaces"][self._INTERFACE_NAME], - ) - - # Save a dictionary of cached API call functions. - # These are the actual callables which invoke the proper - # transport methods, wrapped with `wrap_method` to add retry, - # timeout, and the like. - self._inner_api_calls = {} - - # Service calls - def create_dashboard( - self, - parent, - dashboard, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Creates a new custom dashboard. - - This method requires the ``monitoring.dashboards.create`` permission on - the specified project. 
For more information, see `Google Cloud - IAM `__. - - Example: - >>> from google.cloud.monitoring_dashboard import v1 - >>> - >>> client = v1.DashboardsServiceClient() - >>> - >>> # TODO: Initialize `parent`: - >>> parent = '' - >>> - >>> # TODO: Initialize `dashboard`: - >>> dashboard = {} - >>> - >>> response = client.create_dashboard(parent, dashboard) - - Args: - parent (str): Required. The project on which to execute the request. The format - is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - - The ``[PROJECT_ID_OR_NUMBER]`` must match the dashboard resource name. - dashboard (Union[dict, ~google.cloud.monitoring_dashboard.v1.types.Dashboard]): Required. The initial dashboard specification. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_dashboard.v1.types.Dashboard` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_dashboard.v1.types.Dashboard` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "create_dashboard" not in self._inner_api_calls: - self._inner_api_calls[ - "create_dashboard" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.create_dashboard, - default_retry=self._method_configs["CreateDashboard"].retry, - default_timeout=self._method_configs["CreateDashboard"].timeout, - client_info=self._client_info, - ) - - request = dashboards_service_pb2.CreateDashboardRequest( - parent=parent, dashboard=dashboard, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["create_dashboard"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def list_dashboards( - self, - parent, - page_size=None, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Lists the existing dashboards. - - This method requires the ``monitoring.dashboards.list`` permission on - the specified project. For more information, see `Google Cloud - IAM `__. - - Example: - >>> from google.cloud.monitoring_dashboard import v1 - >>> - >>> client = v1.DashboardsServiceClient() - >>> - >>> parent = client.project_path('[PROJECT]') - >>> - >>> # Iterate over all results - >>> for element in client.list_dashboards(parent): - ... # process element - ... pass - >>> - >>> - >>> # Alternatively: - >>> - >>> # Iterate over results one page at a time - >>> for page in client.list_dashboards(parent).pages: - ... for element in page: - ... # process element - ... pass - - Args: - parent (str): Required. The scope of the dashboards to list. 
The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER] - page_size (int): The maximum number of resources contained in the - underlying API response. If page streaming is performed per- - resource, this parameter does not affect the return value. If page - streaming is performed per-page, this determines the maximum number - of resources in a page. - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.api_core.page_iterator.PageIterator` instance. - An iterable of :class:`~google.cloud.monitoring_dashboard.v1.types.Dashboard` instances. - You can also iterate over the pages of the response - using its `pages` property. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "list_dashboards" not in self._inner_api_calls: - self._inner_api_calls[ - "list_dashboards" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.list_dashboards, - default_retry=self._method_configs["ListDashboards"].retry, - default_timeout=self._method_configs["ListDashboards"].timeout, - client_info=self._client_info, - ) - - request = dashboards_service_pb2.ListDashboardsRequest( - parent=parent, page_size=page_size, - ) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("parent", parent)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - iterator = google.api_core.page_iterator.GRPCIterator( - client=None, - method=functools.partial( - self._inner_api_calls["list_dashboards"], - retry=retry, - timeout=timeout, - metadata=metadata, - ), - request=request, - items_field="dashboards", - request_token_field="page_token", - response_token_field="next_page_token", - ) - return iterator - - def get_dashboard( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Fetches a specific dashboard. - - This method requires the ``monitoring.dashboards.get`` permission on the - specified dashboard. For more information, see `Google Cloud - IAM `__. - - Example: - >>> from google.cloud.monitoring_dashboard import v1 - >>> - >>> client = v1.DashboardsServiceClient() - >>> - >>> name = client.dashboard_path('[PROJECT]', '[DASHBOARD]') - >>> - >>> response = client.get_dashboard(name) - - Args: - name (str): Required. The resource name of the Dashboard. The format is one of: - - - ``dashboards/[DASHBOARD_ID]`` (for system dashboards) - - ``projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID]`` (for - custom dashboards). - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. 
- timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_dashboard.v1.types.Dashboard` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. - if "get_dashboard" not in self._inner_api_calls: - self._inner_api_calls[ - "get_dashboard" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.get_dashboard, - default_retry=self._method_configs["GetDashboard"].retry, - default_timeout=self._method_configs["GetDashboard"].timeout, - client_info=self._client_info, - ) - - request = dashboards_service_pb2.GetDashboardRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["get_dashboard"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def delete_dashboard( - self, - name, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Deletes an existing custom dashboard. - - This method requires the ``monitoring.dashboards.delete`` permission on - the specified dashboard. For more information, see `Google Cloud - IAM `__. - - Example: - >>> from google.cloud.monitoring_dashboard import v1 - >>> - >>> client = v1.DashboardsServiceClient() - >>> - >>> name = client.dashboard_path('[PROJECT]', '[DASHBOARD]') - >>> - >>> client.delete_dashboard(name) - - Args: - name (str): Required. The resource name of the Dashboard. The format is: - - :: - - projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID] - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "delete_dashboard" not in self._inner_api_calls: - self._inner_api_calls[ - "delete_dashboard" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.delete_dashboard, - default_retry=self._method_configs["DeleteDashboard"].retry, - default_timeout=self._method_configs["DeleteDashboard"].timeout, - client_info=self._client_info, - ) - - request = dashboards_service_pb2.DeleteDashboardRequest(name=name,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("name", name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - self._inner_api_calls["delete_dashboard"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) - - def update_dashboard( - self, - dashboard, - retry=google.api_core.gapic_v1.method.DEFAULT, - timeout=google.api_core.gapic_v1.method.DEFAULT, - metadata=None, - ): - """ - Replaces an existing custom dashboard with a new definition. - - This method requires the ``monitoring.dashboards.update`` permission on - the specified dashboard. For more information, see `Google Cloud - IAM `__. - - Example: - >>> from google.cloud.monitoring_dashboard import v1 - >>> - >>> client = v1.DashboardsServiceClient() - >>> - >>> # TODO: Initialize `dashboard`: - >>> dashboard = {} - >>> - >>> response = client.update_dashboard(dashboard) - - Args: - dashboard (Union[dict, ~google.cloud.monitoring_dashboard.v1.types.Dashboard]): Required. The dashboard that will replace the existing dashboard. - - If a dict is provided, it must be of the same form as the protobuf - message :class:`~google.cloud.monitoring_dashboard.v1.types.Dashboard` - retry (Optional[google.api_core.retry.Retry]): A retry object used - to retry requests. If ``None`` is specified, requests will - be retried using a default configuration. - timeout (Optional[float]): The amount of time, in seconds, to wait - for the request to complete. Note that if ``retry`` is - specified, the timeout applies to each individual attempt. - metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata - that is provided to the method. - - Returns: - A :class:`~google.cloud.monitoring_dashboard.v1.types.Dashboard` instance. - - Raises: - google.api_core.exceptions.GoogleAPICallError: If the request - failed for any reason. - google.api_core.exceptions.RetryError: If the request failed due - to a retryable error and retry attempts failed. - ValueError: If the parameters are invalid. - """ - # Wrap the transport method to add retry and timeout logic. 
- if "update_dashboard" not in self._inner_api_calls: - self._inner_api_calls[ - "update_dashboard" - ] = google.api_core.gapic_v1.method.wrap_method( - self.transport.update_dashboard, - default_retry=self._method_configs["UpdateDashboard"].retry, - default_timeout=self._method_configs["UpdateDashboard"].timeout, - client_info=self._client_info, - ) - - request = dashboards_service_pb2.UpdateDashboardRequest(dashboard=dashboard,) - if metadata is None: - metadata = [] - metadata = list(metadata) - try: - routing_header = [("dashboard.name", dashboard.name)] - except AttributeError: - pass - else: - routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata( - routing_header - ) - metadata.append(routing_metadata) - - return self._inner_api_calls["update_dashboard"]( - request, retry=retry, timeout=timeout, metadata=metadata - ) diff --git a/google/cloud/monitoring_dashboard/v1/gapic/dashboards_service_client_config.py b/google/cloud/monitoring_dashboard/v1/gapic/dashboards_service_client_config.py deleted file mode 100644 index a649ef0..0000000 --- a/google/cloud/monitoring_dashboard/v1/gapic/dashboards_service_client_config.py +++ /dev/null @@ -1,48 +0,0 @@ -config = { - "interfaces": { - "google.monitoring.dashboard.v1.DashboardsService": { - "retry_codes": { - "idempotent": ["DEADLINE_EXCEEDED", "UNAVAILABLE"], - "non_idempotent": [], - }, - "retry_params": { - "default": { - "initial_retry_delay_millis": 100, - "retry_delay_multiplier": 1.3, - "max_retry_delay_millis": 60000, - "initial_rpc_timeout_millis": 20000, - "rpc_timeout_multiplier": 1.0, - "max_rpc_timeout_millis": 20000, - "total_timeout_millis": 600000, - } - }, - "methods": { - "CreateDashboard": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "ListDashboards": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "GetDashboard": { - "timeout_millis": 60000, - "retry_codes_name": "idempotent", - "retry_params_name": "default", - }, - "DeleteDashboard": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - "UpdateDashboard": { - "timeout_millis": 60000, - "retry_codes_name": "non_idempotent", - "retry_params_name": "default", - }, - }, - } - } -} diff --git a/google/cloud/monitoring_dashboard/v1/gapic/enums.py b/google/cloud/monitoring_dashboard/v1/gapic/enums.py deleted file mode 100644 index 3824010..0000000 --- a/google/cloud/monitoring_dashboard/v1/gapic/enums.py +++ /dev/null @@ -1,435 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -"""Wrappers for protocol buffer enum types.""" - -import enum - - -class SparkChartType(enum.IntEnum): - """ - Defines the possible types of spark chart supported by the - ``Scorecard``. - - Attributes: - SPARK_CHART_TYPE_UNSPECIFIED (int): Not allowed in well-formed requests. 
- SPARK_LINE (int): The sparkline will be rendered as a small line chart. - SPARK_BAR (int): The sparkbar will be rendered as a small bar chart. - """ - - SPARK_CHART_TYPE_UNSPECIFIED = 0 - SPARK_LINE = 1 - SPARK_BAR = 2 - - -class Aggregation(object): - class Aligner(enum.IntEnum): - """ - The ``Aligner`` specifies the operation that will be applied to the - data points in each alignment period in a time series. Except for - ``ALIGN_NONE``, which specifies that no operation be applied, each - alignment operation replaces the set of data values in each alignment - period with a single value: the result of applying the operation to the - data values. An aligned time series has a single data value at the end - of each ``alignment_period``. - - An alignment operation can change the data type of the values, too. For - example, if you apply a counting operation to boolean values, the data - ``value_type`` in the original time series is ``BOOLEAN``, but the - ``value_type`` in the aligned result is ``INT64``. - - Attributes: - ALIGN_NONE (int): No alignment. Raw data is returned. Not valid if cross-series - reduction is requested. The ``value_type`` of the result is the same as - the ``value_type`` of the input. - ALIGN_DELTA (int): Align and convert to ``DELTA``. The output is ``delta = y1 - y0``. - - This alignment is valid for ``CUMULATIVE`` and ``DELTA`` metrics. If the - selected alignment period results in periods with no data, then the - aligned value for such a period is created by interpolation. The - ``value_type`` of the aligned result is the same as the ``value_type`` - of the input. - ALIGN_RATE (int): Align and convert to a rate. The result is computed as - ``rate = (y1 - y0)/(t1 - t0)``, or "delta over time". Think of this - aligner as providing the slope of the line that passes through the value - at the start and at the end of the ``alignment_period``. - - This aligner is valid for ``CUMULATIVE`` and ``DELTA`` metrics with - numeric values. If the selected alignment period results in periods with - no data, then the aligned value for such a period is created by - interpolation. The output is a ``GAUGE`` metric with ``value_type`` - ``DOUBLE``. - - If, by "rate", you mean "percentage change", see the - ``ALIGN_PERCENT_CHANGE`` aligner instead. - ALIGN_INTERPOLATE (int): Align by interpolating between adjacent points around the alignment - period boundary. This aligner is valid for ``GAUGE`` metrics with - numeric values. The ``value_type`` of the aligned result is the same as - the ``value_type`` of the input. - ALIGN_NEXT_OLDER (int): Align by moving the most recent data point before the end of the - alignment period to the boundary at the end of the alignment period. - This aligner is valid for ``GAUGE`` metrics. The ``value_type`` of the - aligned result is the same as the ``value_type`` of the input. - ALIGN_MIN (int): Align the time series by returning the minimum value in each - alignment period. This aligner is valid for ``GAUGE`` and ``DELTA`` - metrics with numeric values. The ``value_type`` of the aligned result is - the same as the ``value_type`` of the input. - ALIGN_MAX (int): Align the time series by returning the maximum value in each - alignment period. This aligner is valid for ``GAUGE`` and ``DELTA`` - metrics with numeric values. The ``value_type`` of the aligned result is - the same as the ``value_type`` of the input. - ALIGN_MEAN (int): Align the time series by returning the mean value in each alignment - period. 
This aligner is valid for ``GAUGE`` and ``DELTA`` metrics with - numeric values. The ``value_type`` of the aligned result is ``DOUBLE``. - ALIGN_COUNT (int): Align the time series by returning the number of values in each - alignment period. This aligner is valid for ``GAUGE`` and ``DELTA`` - metrics with numeric or Boolean values. The ``value_type`` of the - aligned result is ``INT64``. - ALIGN_SUM (int): Align the time series by returning the sum of the values in each - alignment period. This aligner is valid for ``GAUGE`` and ``DELTA`` - metrics with numeric and distribution values. The ``value_type`` of the - aligned result is the same as the ``value_type`` of the input. - ALIGN_STDDEV (int): Align the time series by returning the standard deviation of the - values in each alignment period. This aligner is valid for ``GAUGE`` and - ``DELTA`` metrics with numeric values. The ``value_type`` of the output - is ``DOUBLE``. - ALIGN_COUNT_TRUE (int): Align the time series by returning the number of ``True`` values in - each alignment period. This aligner is valid for ``GAUGE`` metrics with - Boolean values. The ``value_type`` of the output is ``INT64``. - ALIGN_COUNT_FALSE (int): Align the time series by returning the number of ``False`` values in - each alignment period. This aligner is valid for ``GAUGE`` metrics with - Boolean values. The ``value_type`` of the output is ``INT64``. - ALIGN_FRACTION_TRUE (int): Align the time series by returning the ratio of the number of - ``True`` values to the total number of values in each alignment period. - This aligner is valid for ``GAUGE`` metrics with Boolean values. The - output value is in the range [0.0, 1.0] and has ``value_type`` - ``DOUBLE``. - ALIGN_PERCENTILE_99 (int): Align the time series by using `percentile - aggregation `__. The resulting - data point in each alignment period is the 99th percentile of all data - points in the period. This aligner is valid for ``GAUGE`` and ``DELTA`` - metrics with distribution values. The output is a ``GAUGE`` metric with - ``value_type`` ``DOUBLE``. - ALIGN_PERCENTILE_95 (int): Align the time series by using `percentile - aggregation `__. The resulting - data point in each alignment period is the 95th percentile of all data - points in the period. This aligner is valid for ``GAUGE`` and ``DELTA`` - metrics with distribution values. The output is a ``GAUGE`` metric with - ``value_type`` ``DOUBLE``. - ALIGN_PERCENTILE_50 (int): Align the time series by using `percentile - aggregation `__. The resulting - data point in each alignment period is the 50th percentile of all data - points in the period. This aligner is valid for ``GAUGE`` and ``DELTA`` - metrics with distribution values. The output is a ``GAUGE`` metric with - ``value_type`` ``DOUBLE``. - ALIGN_PERCENTILE_05 (int): Align the time series by using `percentile - aggregation `__. The resulting - data point in each alignment period is the 5th percentile of all data - points in the period. This aligner is valid for ``GAUGE`` and ``DELTA`` - metrics with distribution values. The output is a ``GAUGE`` metric with - ``value_type`` ``DOUBLE``. - ALIGN_PERCENT_CHANGE (int): Align and convert to a percentage change. This aligner is valid for - ``GAUGE`` and ``DELTA`` metrics with numeric values. This alignment - returns ``((current - previous)/previous) * 100``, where the value of - ``previous`` is determined based on the ``alignment_period``. - - If the values of ``current`` and ``previous`` are both 0, then the - returned value is 0. 
If only ``previous`` is 0, the returned value is - infinity. - - A 10-minute moving mean is computed at each point of the alignment - period prior to the above calculation to smooth the metric and prevent - false positives from very short-lived spikes. The moving mean is only - applicable for data whose values are ``>= 0``. Any values ``< 0`` are - treated as a missing datapoint, and are ignored. While ``DELTA`` metrics - are accepted by this alignment, special care should be taken that the - values for the metric will always be positive. The output is a ``GAUGE`` - metric with ``value_type`` ``DOUBLE``. - """ - - ALIGN_NONE = 0 - ALIGN_DELTA = 1 - ALIGN_RATE = 2 - ALIGN_INTERPOLATE = 3 - ALIGN_NEXT_OLDER = 4 - ALIGN_MIN = 10 - ALIGN_MAX = 11 - ALIGN_MEAN = 12 - ALIGN_COUNT = 13 - ALIGN_SUM = 14 - ALIGN_STDDEV = 15 - ALIGN_COUNT_TRUE = 16 - ALIGN_COUNT_FALSE = 24 - ALIGN_FRACTION_TRUE = 17 - ALIGN_PERCENTILE_99 = 18 - ALIGN_PERCENTILE_95 = 19 - ALIGN_PERCENTILE_50 = 20 - ALIGN_PERCENTILE_05 = 21 - ALIGN_PERCENT_CHANGE = 23 - - class Reducer(enum.IntEnum): - """ - A Reducer operation describes how to aggregate data points from multiple - time series into a single time series, where the value of each data point - in the resulting series is a function of all the already aligned values in - the input time series. - - Attributes: - REDUCE_NONE (int): No cross-time series reduction. The output of the ``Aligner`` is - returned. - REDUCE_MEAN (int): Reduce by computing the mean value across time series for each - alignment period. This reducer is valid for ``DELTA`` and ``GAUGE`` - metrics with numeric or distribution values. The ``value_type`` of the - output is ``DOUBLE``. - REDUCE_MIN (int): Reduce by computing the minimum value across time series for each - alignment period. This reducer is valid for ``DELTA`` and ``GAUGE`` - metrics with numeric values. The ``value_type`` of the output is the - same as the ``value_type`` of the input. - REDUCE_MAX (int): Reduce by computing the maximum value across time series for each - alignment period. This reducer is valid for ``DELTA`` and ``GAUGE`` - metrics with numeric values. The ``value_type`` of the output is the - same as the ``value_type`` of the input. - REDUCE_SUM (int): Reduce by computing the sum across time series for each alignment - period. This reducer is valid for ``DELTA`` and ``GAUGE`` metrics with - numeric and distribution values. The ``value_type`` of the output is the - same as the ``value_type`` of the input. - REDUCE_STDDEV (int): Reduce by computing the standard deviation across time series for - each alignment period. This reducer is valid for ``DELTA`` and ``GAUGE`` - metrics with numeric or distribution values. The ``value_type`` of the - output is ``DOUBLE``. - REDUCE_COUNT (int): Reduce by computing the number of data points across time series for - each alignment period. This reducer is valid for ``DELTA`` and ``GAUGE`` - metrics of numeric, Boolean, distribution, and string ``value_type``. - The ``value_type`` of the output is ``INT64``. - REDUCE_COUNT_TRUE (int): Reduce by computing the number of ``True``-valued data points across - time series for each alignment period. This reducer is valid for - ``DELTA`` and ``GAUGE`` metrics of Boolean ``value_type``. The - ``value_type`` of the output is ``INT64``. - REDUCE_COUNT_FALSE (int): Reduce by computing the number of ``False``-valued data points - across time series for each alignment period. 
This reducer is valid for - ``DELTA`` and ``GAUGE`` metrics of Boolean ``value_type``. The - ``value_type`` of the output is ``INT64``. - REDUCE_FRACTION_TRUE (int): Reduce by computing the ratio of the number of ``True``-valued data - points to the total number of data points for each alignment period. - This reducer is valid for ``DELTA`` and ``GAUGE`` metrics of Boolean - ``value_type``. The output value is in the range [0.0, 1.0] and has - ``value_type`` ``DOUBLE``. - REDUCE_PERCENTILE_99 (int): Reduce by computing the `99th - percentile `__ of data points - across time series for each alignment period. This reducer is valid for - ``GAUGE`` and ``DELTA`` metrics of numeric and distribution type. The - value of the output is ``DOUBLE``. - REDUCE_PERCENTILE_95 (int): Reduce by computing the `95th - percentile `__ of data points - across time series for each alignment period. This reducer is valid for - ``GAUGE`` and ``DELTA`` metrics of numeric and distribution type. The - value of the output is ``DOUBLE``. - REDUCE_PERCENTILE_50 (int): Reduce by computing the `50th - percentile `__ of data points - across time series for each alignment period. This reducer is valid for - ``GAUGE`` and ``DELTA`` metrics of numeric and distribution type. The - value of the output is ``DOUBLE``. - REDUCE_PERCENTILE_05 (int): Reduce by computing the `5th - percentile `__ of data points - across time series for each alignment period. This reducer is valid for - ``GAUGE`` and ``DELTA`` metrics of numeric and distribution type. The - value of the output is ``DOUBLE``. - """ - - REDUCE_NONE = 0 - REDUCE_MEAN = 1 - REDUCE_MIN = 2 - REDUCE_MAX = 3 - REDUCE_SUM = 4 - REDUCE_STDDEV = 5 - REDUCE_COUNT = 6 - REDUCE_COUNT_TRUE = 7 - REDUCE_COUNT_FALSE = 15 - REDUCE_FRACTION_TRUE = 8 - REDUCE_PERCENTILE_99 = 9 - REDUCE_PERCENTILE_95 = 10 - REDUCE_PERCENTILE_50 = 11 - REDUCE_PERCENTILE_05 = 12 - - -class ChartOptions(object): - class Mode(enum.IntEnum): - """ - Chart mode options. - - Attributes: - MODE_UNSPECIFIED (int): Mode is unspecified. The view will default to ``COLOR``. - COLOR (int): The chart distinguishes data series using different color. Line - colors may get reused when there are many lines in the chart. - X_RAY (int): The chart uses the Stackdriver x-ray mode, in which each - data set is plotted using the same semi-transparent color. - STATS (int): The chart displays statistics such as average, median, 95th percentile, - and more. - """ - - MODE_UNSPECIFIED = 0 - COLOR = 1 - X_RAY = 2 - STATS = 3 - - -class PickTimeSeriesFilter(object): - class Direction(enum.IntEnum): - """ - Describes the ranking directions. - - Attributes: - DIRECTION_UNSPECIFIED (int): Not allowed. You must specify a different ``Direction`` if you - specify a ``PickTimeSeriesFilter``. - TOP (int): Pass the highest ``num_time_series`` ranking inputs. - BOTTOM (int): Pass the lowest ``num_time_series`` ranking inputs. - """ - - DIRECTION_UNSPECIFIED = 0 - TOP = 1 - BOTTOM = 2 - - class Method(enum.IntEnum): - """ - The value reducers that can be applied to a - ``PickTimeSeriesFilter``. - - Attributes: - METHOD_UNSPECIFIED (int): Not allowed. You must specify a different ``Method`` if you specify - a ``PickTimeSeriesFilter``. - METHOD_MEAN (int): Select the mean of all values. - METHOD_MAX (int): Select the maximum value. - METHOD_MIN (int): Select the minimum value. - METHOD_SUM (int): Compute the sum of all values. - METHOD_LATEST (int): Select the most recent value. 
- """ - - METHOD_UNSPECIFIED = 0 - METHOD_MEAN = 1 - METHOD_MAX = 2 - METHOD_MIN = 3 - METHOD_SUM = 4 - METHOD_LATEST = 5 - - -class StatisticalTimeSeriesFilter(object): - class Method(enum.IntEnum): - """ - The filter methods that can be applied to a stream. - - Attributes: - METHOD_UNSPECIFIED (int): Not allowed in well-formed requests. - METHOD_CLUSTER_OUTLIER (int): Compute the outlier score of each stream. - """ - - METHOD_UNSPECIFIED = 0 - METHOD_CLUSTER_OUTLIER = 1 - - -class Text(object): - class Format(enum.IntEnum): - """ - The format type of the text content. - - Attributes: - FORMAT_UNSPECIFIED (int): Format is unspecified. Defaults to MARKDOWN. - MARKDOWN (int): The text contains Markdown formatting. - RAW (int): The text contains no special formatting. - """ - - FORMAT_UNSPECIFIED = 0 - MARKDOWN = 1 - RAW = 2 - - -class Threshold(object): - class Color(enum.IntEnum): - """ - The color suggests an interpretation to the viewer when actual values cross - the threshold. Comments on each color provide UX guidance on how users can - be expected to interpret a given state color. - - Attributes: - COLOR_UNSPECIFIED (int): Color is unspecified. Not allowed in well-formed requests. - YELLOW (int): Crossing the threshold is "concerning" behavior. - RED (int): Crossing the threshold is "emergency" behavior. - """ - - COLOR_UNSPECIFIED = 0 - YELLOW = 4 - RED = 6 - - class Direction(enum.IntEnum): - """ - Whether the threshold is considered crossed by an actual value above or - below its threshold value. - - Attributes: - DIRECTION_UNSPECIFIED (int): Not allowed in well-formed requests. - ABOVE (int): The threshold will be considered crossed if the actual value is above - the threshold value. - BELOW (int): The threshold will be considered crossed if the actual value is below - the threshold value. - """ - - DIRECTION_UNSPECIFIED = 0 - ABOVE = 1 - BELOW = 2 - - -class XyChart(object): - class DataSet(object): - class PlotType(enum.IntEnum): - """ - The types of plotting strategies for data sets. - - Attributes: - PLOT_TYPE_UNSPECIFIED (int): Plot type is unspecified. The view will default to ``LINE``. - LINE (int): The data is plotted as a set of lines (one line per series). - STACKED_AREA (int): The data is plotted as a set of filled areas (one area per series), - with the areas stacked vertically (the base of each area is the top of - its predecessor, and the base of the first area is the X axis). Since - the areas do not overlap, each is filled with a different opaque color. - STACKED_BAR (int): The data is plotted as a set of rectangular boxes (one box per series), - with the boxes stacked vertically (the base of each box is the top of - its predecessor, and the base of the first box is the X axis). Since - the boxes do not overlap, each is filled with a different opaque color. - HEATMAP (int): The data is plotted as a heatmap. The series being plotted must have - a ``DISTRIBUTION`` value type. The value of each bucket in the - distribution is displayed as a color. This type is not currently - available in the Stackdriver Monitoring application. - """ - - PLOT_TYPE_UNSPECIFIED = 0 - LINE = 1 - STACKED_AREA = 2 - STACKED_BAR = 3 - HEATMAP = 4 - - class Axis(object): - class Scale(enum.IntEnum): - """ - Types of scales used in axes. - - Attributes: - SCALE_UNSPECIFIED (int): Scale is unspecified. The view will default to ``LINEAR``. - LINEAR (int): Linear scale. - LOG10 (int): Logarithmic scale (base 10). 
- """ - - SCALE_UNSPECIFIED = 0 - LINEAR = 1 - LOG10 = 2 diff --git a/google/cloud/monitoring_dashboard/v1/gapic/transports/__init__.py b/google/cloud/monitoring_dashboard/v1/gapic/transports/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/google/cloud/monitoring_dashboard/v1/gapic/transports/dashboards_service_grpc_transport.py b/google/cloud/monitoring_dashboard/v1/gapic/transports/dashboards_service_grpc_transport.py deleted file mode 100644 index 9f49ebf..0000000 --- a/google/cloud/monitoring_dashboard/v1/gapic/transports/dashboards_service_grpc_transport.py +++ /dev/null @@ -1,200 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - - -import google.api_core.grpc_helpers - -from google.cloud.monitoring_dashboard.v1.proto import dashboards_service_pb2_grpc - - -class DashboardsServiceGrpcTransport(object): - """gRPC transport class providing stubs for - google.monitoring.dashboard.v1 DashboardsService API. - - The transport provides access to the raw gRPC stubs, - which can be used to take advantage of advanced - features of gRPC. - """ - - # The scopes needed to make gRPC calls to all of the methods defined - # in this service. - _OAUTH_SCOPES = ( - "https://www.googleapis.com/auth/cloud-platform", - "https://www.googleapis.com/auth/monitoring", - "https://www.googleapis.com/auth/monitoring.read", - "https://www.googleapis.com/auth/monitoring.write", - ) - - def __init__( - self, channel=None, credentials=None, address="monitoring.googleapis.com:443" - ): - """Instantiate the transport class. - - Args: - channel (grpc.Channel): A ``Channel`` instance through - which to make calls. This argument is mutually exclusive - with ``credentials``; providing both will raise an exception. - credentials (google.auth.credentials.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If none - are specified, the client will attempt to ascertain the - credentials from the environment. - address (str): The address where the service is hosted. - """ - # If both `channel` and `credentials` are specified, raise an - # exception (channels come with credentials baked in already). - if channel is not None and credentials is not None: - raise ValueError( - "The `channel` and `credentials` arguments are mutually " "exclusive.", - ) - - # Create the channel. - if channel is None: - channel = self.create_channel( - address=address, - credentials=credentials, - options={ - "grpc.max_send_message_length": -1, - "grpc.max_receive_message_length": -1, - }.items(), - ) - - self._channel = channel - - # gRPC uses objects called "stubs" that are bound to the - # channel and provide a basic method for each RPC. 
- self._stubs = { - "dashboards_service_stub": dashboards_service_pb2_grpc.DashboardsServiceStub( - channel - ), - } - - @classmethod - def create_channel( - cls, address="monitoring.googleapis.com:443", credentials=None, **kwargs - ): - """Create and return a gRPC channel object. - - Args: - address (str): The host for the channel to use. - credentials (~.Credentials): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - kwargs (dict): Keyword arguments, which are passed to the - channel creation. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return google.api_core.grpc_helpers.create_channel( - address, credentials=credentials, scopes=cls._OAUTH_SCOPES, **kwargs - ) - - @property - def channel(self): - """The gRPC channel used by the transport. - - Returns: - grpc.Channel: A gRPC channel object. - """ - return self._channel - - @property - def create_dashboard(self): - """Return the gRPC stub for :meth:`DashboardsServiceClient.create_dashboard`. - - Creates a new custom dashboard. - - This method requires the ``monitoring.dashboards.create`` permission on - the specified project. For more information, see `Google Cloud - IAM `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dashboards_service_stub"].CreateDashboard - - @property - def list_dashboards(self): - """Return the gRPC stub for :meth:`DashboardsServiceClient.list_dashboards`. - - Lists the existing dashboards. - - This method requires the ``monitoring.dashboards.list`` permission on - the specified project. For more information, see `Google Cloud - IAM `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dashboards_service_stub"].ListDashboards - - @property - def get_dashboard(self): - """Return the gRPC stub for :meth:`DashboardsServiceClient.get_dashboard`. - - Fetches a specific dashboard. - - This method requires the ``monitoring.dashboards.get`` permission on the - specified dashboard. For more information, see `Google Cloud - IAM `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dashboards_service_stub"].GetDashboard - - @property - def delete_dashboard(self): - """Return the gRPC stub for :meth:`DashboardsServiceClient.delete_dashboard`. - - Deletes an existing custom dashboard. - - This method requires the ``monitoring.dashboards.delete`` permission on - the specified dashboard. For more information, see `Google Cloud - IAM `__. - - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dashboards_service_stub"].DeleteDashboard - - @property - def update_dashboard(self): - """Return the gRPC stub for :meth:`DashboardsServiceClient.update_dashboard`. - - Replaces an existing custom dashboard with a new definition. - - This method requires the ``monitoring.dashboards.update`` permission on - the specified dashboard. For more information, see `Google Cloud - IAM `__. 
- - Returns: - Callable: A callable which accepts the appropriate - deserialized request object and returns a - deserialized response object. - """ - return self._stubs["dashboards_service_stub"].UpdateDashboard diff --git a/google/cloud/monitoring_dashboard/v1/proto/__init__.py b/google/cloud/monitoring_dashboard/v1/proto/__init__.py deleted file mode 100644 index e69de29..0000000 diff --git a/google/cloud/monitoring_dashboard/v1/proto/common_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/common_pb2.py deleted file mode 100644 index 9d4a467..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/common_pb2.py +++ /dev/null @@ -1,891 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring_dashboard_v1/proto/common.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import distribution_pb2 as google_dot_api_dot_distribution__pb2 -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_dashboard_v1/proto/common.proto", - package="google.monitoring.dashboard.v1", - syntax="proto3", - serialized_options=b'\n"com.google.monitoring.dashboard.v1B\013CommonProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n7google/cloud/monitoring_dashboard_v1/proto/common.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1dgoogle/api/distribution.proto\x1a\x1egoogle/protobuf/duration.proto"\xc1\x07\n\x0b\x41ggregation\x12\x33\n\x10\x61lignment_period\x18\x01 \x01(\x0b\x32\x19.google.protobuf.Duration\x12O\n\x12per_series_aligner\x18\x02 \x01(\x0e\x32\x33.google.monitoring.dashboard.v1.Aggregation.Aligner\x12Q\n\x14\x63ross_series_reducer\x18\x04 \x01(\x0e\x32\x33.google.monitoring.dashboard.v1.Aggregation.Reducer\x12\x17\n\x0fgroup_by_fields\x18\x05 
\x03(\t"\x8b\x03\n\x07\x41ligner\x12\x0e\n\nALIGN_NONE\x10\x00\x12\x0f\n\x0b\x41LIGN_DELTA\x10\x01\x12\x0e\n\nALIGN_RATE\x10\x02\x12\x15\n\x11\x41LIGN_INTERPOLATE\x10\x03\x12\x14\n\x10\x41LIGN_NEXT_OLDER\x10\x04\x12\r\n\tALIGN_MIN\x10\n\x12\r\n\tALIGN_MAX\x10\x0b\x12\x0e\n\nALIGN_MEAN\x10\x0c\x12\x0f\n\x0b\x41LIGN_COUNT\x10\r\x12\r\n\tALIGN_SUM\x10\x0e\x12\x10\n\x0c\x41LIGN_STDDEV\x10\x0f\x12\x14\n\x10\x41LIGN_COUNT_TRUE\x10\x10\x12\x15\n\x11\x41LIGN_COUNT_FALSE\x10\x18\x12\x17\n\x13\x41LIGN_FRACTION_TRUE\x10\x11\x12\x17\n\x13\x41LIGN_PERCENTILE_99\x10\x12\x12\x17\n\x13\x41LIGN_PERCENTILE_95\x10\x13\x12\x17\n\x13\x41LIGN_PERCENTILE_50\x10\x14\x12\x17\n\x13\x41LIGN_PERCENTILE_05\x10\x15\x12\x18\n\x14\x41LIGN_PERCENT_CHANGE\x10\x17"\xb1\x02\n\x07Reducer\x12\x0f\n\x0bREDUCE_NONE\x10\x00\x12\x0f\n\x0bREDUCE_MEAN\x10\x01\x12\x0e\n\nREDUCE_MIN\x10\x02\x12\x0e\n\nREDUCE_MAX\x10\x03\x12\x0e\n\nREDUCE_SUM\x10\x04\x12\x11\n\rREDUCE_STDDEV\x10\x05\x12\x10\n\x0cREDUCE_COUNT\x10\x06\x12\x15\n\x11REDUCE_COUNT_TRUE\x10\x07\x12\x16\n\x12REDUCE_COUNT_FALSE\x10\x0f\x12\x18\n\x14REDUCE_FRACTION_TRUE\x10\x08\x12\x18\n\x14REDUCE_PERCENTILE_99\x10\t\x12\x18\n\x14REDUCE_PERCENTILE_95\x10\n\x12\x18\n\x14REDUCE_PERCENTILE_50\x10\x0b\x12\x18\n\x14REDUCE_PERCENTILE_05\x10\x0c"\x8a\x03\n\x14PickTimeSeriesFilter\x12S\n\x0eranking_method\x18\x01 \x01(\x0e\x32;.google.monitoring.dashboard.v1.PickTimeSeriesFilter.Method\x12\x17\n\x0fnum_time_series\x18\x02 \x01(\x05\x12Q\n\tdirection\x18\x03 \x01(\x0e\x32>.google.monitoring.dashboard.v1.PickTimeSeriesFilter.Direction"t\n\x06Method\x12\x16\n\x12METHOD_UNSPECIFIED\x10\x00\x12\x0f\n\x0bMETHOD_MEAN\x10\x01\x12\x0e\n\nMETHOD_MAX\x10\x02\x12\x0e\n\nMETHOD_MIN\x10\x03\x12\x0e\n\nMETHOD_SUM\x10\x04\x12\x11\n\rMETHOD_LATEST\x10\x05";\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\x07\n\x03TOP\x10\x01\x12\n\n\x06\x42OTTOM\x10\x02"\xd0\x01\n\x1bStatisticalTimeSeriesFilter\x12Z\n\x0eranking_method\x18\x01 \x01(\x0e\x32\x42.google.monitoring.dashboard.v1.StatisticalTimeSeriesFilter.Method\x12\x17\n\x0fnum_time_series\x18\x02 \x01(\x05"<\n\x06Method\x12\x16\n\x12METHOD_UNSPECIFIED\x10\x00\x12\x1a\n\x16METHOD_CLUSTER_OUTLIER\x10\x01\x42\xa7\x01\n"com.google.monitoring.dashboard.v1B\x0b\x43ommonProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', - dependencies=[ - google_dot_api_dot_distribution__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - ], -) - - -_AGGREGATION_ALIGNER = _descriptor.EnumDescriptor( - name="Aligner", - full_name="google.monitoring.dashboard.v1.Aggregation.Aligner", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="ALIGN_NONE", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_DELTA", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_RATE", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_INTERPOLATE", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_NEXT_OLDER", - index=4, - number=4, - 
serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_MIN", - index=5, - number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_MAX", - index=6, - number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_MEAN", - index=7, - number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_COUNT", - index=8, - number=13, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_SUM", - index=9, - number=14, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_STDDEV", - index=10, - number=15, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_COUNT_TRUE", - index=11, - number=16, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_COUNT_FALSE", - index=12, - number=24, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_FRACTION_TRUE", - index=13, - number=17, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_PERCENTILE_99", - index=14, - number=18, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_PERCENTILE_95", - index=15, - number=19, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_PERCENTILE_50", - index=16, - number=20, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_PERCENTILE_05", - index=17, - number=21, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ALIGN_PERCENT_CHANGE", - index=18, - number=23, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=413, - serialized_end=808, -) -_sym_db.RegisterEnumDescriptor(_AGGREGATION_ALIGNER) - -_AGGREGATION_REDUCER = _descriptor.EnumDescriptor( - name="Reducer", - full_name="google.monitoring.dashboard.v1.Aggregation.Reducer", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="REDUCE_NONE", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_MEAN", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_MIN", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_MAX", - index=3, - 
number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_SUM", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_STDDEV", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_COUNT", - index=6, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_COUNT_TRUE", - index=7, - number=7, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_COUNT_FALSE", - index=8, - number=15, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_FRACTION_TRUE", - index=9, - number=8, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_PERCENTILE_99", - index=10, - number=9, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_PERCENTILE_95", - index=11, - number=10, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_PERCENTILE_50", - index=12, - number=11, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="REDUCE_PERCENTILE_05", - index=13, - number=12, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=811, - serialized_end=1116, -) -_sym_db.RegisterEnumDescriptor(_AGGREGATION_REDUCER) - -_PICKTIMESERIESFILTER_METHOD = _descriptor.EnumDescriptor( - name="Method", - full_name="google.monitoring.dashboard.v1.PickTimeSeriesFilter.Method", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="METHOD_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="METHOD_MEAN", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="METHOD_MAX", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="METHOD_MIN", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="METHOD_SUM", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="METHOD_LATEST", - index=5, - number=5, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1336, - serialized_end=1452, -) -_sym_db.RegisterEnumDescriptor(_PICKTIMESERIESFILTER_METHOD) - -_PICKTIMESERIESFILTER_DIRECTION = 
_descriptor.EnumDescriptor( - name="Direction", - full_name="google.monitoring.dashboard.v1.PickTimeSeriesFilter.Direction", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="DIRECTION_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="TOP", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="BOTTOM", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1454, - serialized_end=1513, -) -_sym_db.RegisterEnumDescriptor(_PICKTIMESERIESFILTER_DIRECTION) - -_STATISTICALTIMESERIESFILTER_METHOD = _descriptor.EnumDescriptor( - name="Method", - full_name="google.monitoring.dashboard.v1.StatisticalTimeSeriesFilter.Method", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="METHOD_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="METHOD_CLUSTER_OUTLIER", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1664, - serialized_end=1724, -) -_sym_db.RegisterEnumDescriptor(_STATISTICALTIMESERIESFILTER_METHOD) - - -_AGGREGATION = _descriptor.Descriptor( - name="Aggregation", - full_name="google.monitoring.dashboard.v1.Aggregation", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="alignment_period", - full_name="google.monitoring.dashboard.v1.Aggregation.alignment_period", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="per_series_aligner", - full_name="google.monitoring.dashboard.v1.Aggregation.per_series_aligner", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="cross_series_reducer", - full_name="google.monitoring.dashboard.v1.Aggregation.cross_series_reducer", - index=2, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="group_by_fields", - full_name="google.monitoring.dashboard.v1.Aggregation.group_by_fields", - index=3, - number=5, - type=9, - cpp_type=9, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - 
enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_AGGREGATION_ALIGNER, _AGGREGATION_REDUCER,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=155, - serialized_end=1116, -) - - -_PICKTIMESERIESFILTER = _descriptor.Descriptor( - name="PickTimeSeriesFilter", - full_name="google.monitoring.dashboard.v1.PickTimeSeriesFilter", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="ranking_method", - full_name="google.monitoring.dashboard.v1.PickTimeSeriesFilter.ranking_method", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="num_time_series", - full_name="google.monitoring.dashboard.v1.PickTimeSeriesFilter.num_time_series", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="direction", - full_name="google.monitoring.dashboard.v1.PickTimeSeriesFilter.direction", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_PICKTIMESERIESFILTER_METHOD, _PICKTIMESERIESFILTER_DIRECTION,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1119, - serialized_end=1513, -) - - -_STATISTICALTIMESERIESFILTER = _descriptor.Descriptor( - name="StatisticalTimeSeriesFilter", - full_name="google.monitoring.dashboard.v1.StatisticalTimeSeriesFilter", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="ranking_method", - full_name="google.monitoring.dashboard.v1.StatisticalTimeSeriesFilter.ranking_method", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="num_time_series", - full_name="google.monitoring.dashboard.v1.StatisticalTimeSeriesFilter.num_time_series", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - 
nested_types=[], - enum_types=[_STATISTICALTIMESERIESFILTER_METHOD,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1516, - serialized_end=1724, -) - -_AGGREGATION.fields_by_name[ - "alignment_period" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_AGGREGATION.fields_by_name["per_series_aligner"].enum_type = _AGGREGATION_ALIGNER -_AGGREGATION.fields_by_name["cross_series_reducer"].enum_type = _AGGREGATION_REDUCER -_AGGREGATION_ALIGNER.containing_type = _AGGREGATION -_AGGREGATION_REDUCER.containing_type = _AGGREGATION -_PICKTIMESERIESFILTER.fields_by_name[ - "ranking_method" -].enum_type = _PICKTIMESERIESFILTER_METHOD -_PICKTIMESERIESFILTER.fields_by_name[ - "direction" -].enum_type = _PICKTIMESERIESFILTER_DIRECTION -_PICKTIMESERIESFILTER_METHOD.containing_type = _PICKTIMESERIESFILTER -_PICKTIMESERIESFILTER_DIRECTION.containing_type = _PICKTIMESERIESFILTER -_STATISTICALTIMESERIESFILTER.fields_by_name[ - "ranking_method" -].enum_type = _STATISTICALTIMESERIESFILTER_METHOD -_STATISTICALTIMESERIESFILTER_METHOD.containing_type = _STATISTICALTIMESERIESFILTER -DESCRIPTOR.message_types_by_name["Aggregation"] = _AGGREGATION -DESCRIPTOR.message_types_by_name["PickTimeSeriesFilter"] = _PICKTIMESERIESFILTER -DESCRIPTOR.message_types_by_name[ - "StatisticalTimeSeriesFilter" -] = _STATISTICALTIMESERIESFILTER -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Aggregation = _reflection.GeneratedProtocolMessageType( - "Aggregation", - (_message.Message,), - { - "DESCRIPTOR": _AGGREGATION, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.common_pb2", - "__doc__": """Describes how to combine multiple time series to provide a different - view of the data. Aggregation of time series is done in two steps. - First, each time series in the set is *aligned* to the same time - interval boundaries, then the set of time series is optionally - *reduced* in number. Alignment consists of applying the - ``per_series_aligner`` operation to each time series after its data - has been divided into regular ``alignment_period`` time intervals. - This process takes *all* of the data points in an alignment period, - applies a mathematical transformation such as averaging, minimum, - maximum, delta, etc., and converts them into a single data point per - period. Reduction is when the aligned and transformed time series can - optionally be combined, reducing the number of time series through - similar mathematical transformations. Reduction involves applying a - ``cross_series_reducer`` to all the time series, optionally sorting - the time series into subsets with ``group_by_fields``, and applying - the reducer to each subset. The raw time series data can contain a - huge amount of information from multiple sources. Alignment and - reduction transforms this mass of data into a more manageable and - representative collection of data, for example “the 95% latency across - the average of all tasks in a cluster”. This representative data can - be more easily graphed and comprehended, and the individual time - series data is still available for later drilldown. For more details, - see `Filtering and aggregation - `__. - - Attributes: - alignment_period: - The ``alignment_period`` specifies a time interval, in - seconds, that is used to divide the data in all the [time - series][google.monitoring.v3.TimeSeries] into consistent - blocks of time. This will be done before the per-series - aligner can be applied to the data. 
The value must be at - least 60 seconds. If a per-series aligner other than - ``ALIGN_NONE`` is specified, this field is required or an - error is returned. If no per-series aligner is specified, or - the aligner ``ALIGN_NONE`` is specified, then this field is - ignored. - per_series_aligner: - An ``Aligner`` describes how to bring the data points in a - single time series into temporal alignment. Except for - ``ALIGN_NONE``, all alignments cause all the data points in an - ``alignment_period`` to be mathematically grouped together, - resulting in a single data point for each ``alignment_period`` - with end timestamp at the end of the period. Not all - alignment operations may be applied to all time series. The - valid choices depend on the ``metric_kind`` and ``value_type`` - of the original time series. Alignment can change the - ``metric_kind`` or the ``value_type`` of the time series. - Time series data must be aligned in order to perform cross- - time series reduction. If ``cross_series_reducer`` is - specified, then ``per_series_aligner`` must be specified and - not equal to ``ALIGN_NONE`` and ``alignment_period`` must be - specified; otherwise, an error is returned. - cross_series_reducer: - The reduction operation to be used to combine time series into - a single time series, where the value of each data point in - the resulting series is a function of all the already aligned - values in the input time series. Not all reducer operations - can be applied to all time series. The valid choices depend on - the ``metric_kind`` and the ``value_type`` of the original - time series. Reduction can yield a time series with a - different ``metric_kind`` or ``value_type`` than the input - time series. Time series data must first be aligned (see - ``per_series_aligner``) in order to perform cross-time series - reduction. If ``cross_series_reducer`` is specified, then - ``per_series_aligner`` must be specified, and must not be - ``ALIGN_NONE``. An ``alignment_period`` must also be - specified; otherwise, an error is returned. - group_by_fields: - The set of fields to preserve when ``cross_series_reducer`` is - specified. The ``group_by_fields`` determine how the time - series are partitioned into subsets prior to applying the - aggregation operation. Each subset contains time series that - have the same value for each of the grouping fields. Each - individual time series is a member of exactly one subset. The - ``cross_series_reducer`` is applied to each subset of time - series. It is not possible to reduce across different resource - types, so this field implicitly contains ``resource.type``. - Fields not specified in ``group_by_fields`` are aggregated - away. If ``group_by_fields`` is not specified and all the time - series have the same resource type, then the time series are - aggregated into a single output time series. If - ``cross_series_reducer`` is not defined, this field is - ignored. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.Aggregation) - }, -) -_sym_db.RegisterMessage(Aggregation) - -PickTimeSeriesFilter = _reflection.GeneratedProtocolMessageType( - "PickTimeSeriesFilter", - (_message.Message,), - { - "DESCRIPTOR": _PICKTIMESERIESFILTER, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.common_pb2", - "__doc__": """Describes a ranking-based time series filter. Each input time series - is ranked with an aligner. 
The filter will allow up to - ``num_time_series`` time series to pass through it, selecting them - based on the relative ranking. For example, if ``ranking_method`` is - ``METHOD_MEAN``,\ ``direction`` is ``BOTTOM``, and ``num_time_series`` - is 3, then the 3 times series with the lowest mean values will pass - through the filter. - - Attributes: - ranking_method: - \ ``ranking_method`` is applied to each time series - independently to produce the value which will be used to - compare the time series to other time series. - num_time_series: - How many time series to allow to pass through the filter. - direction: - How to use the ranking to select time series that pass through - the filter. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.PickTimeSeriesFilter) - }, -) -_sym_db.RegisterMessage(PickTimeSeriesFilter) - -StatisticalTimeSeriesFilter = _reflection.GeneratedProtocolMessageType( - "StatisticalTimeSeriesFilter", - (_message.Message,), - { - "DESCRIPTOR": _STATISTICALTIMESERIESFILTER, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.common_pb2", - "__doc__": """A filter that ranks streams based on their statistical relation to - other streams in a request. Note: This field is deprecated and - completely ignored by the API. - - Attributes: - ranking_method: - \ ``rankingMethod`` is applied to a set of time series, and - then the produced value for each individual time series is - used to compare a given time series to others. These are - methods that cannot be applied stream-by-stream, but rather - require the full context of a request to evaluate time series. - num_time_series: - How many time series to output. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.StatisticalTimeSeriesFilter) - }, -) -_sym_db.RegisterMessage(StatisticalTimeSeriesFilter) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2.py deleted file mode 100644 index b137429..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2.py +++ /dev/null @@ -1,278 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring_dashboard_v1/proto/dashboard.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.monitoring_dashboard.v1.proto import ( - layouts_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_layouts__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_dashboard_v1/proto/dashboard.proto", - package="google.monitoring.dashboard.v1", - syntax="proto3", - serialized_options=b'\n"com.google.monitoring.dashboard.v1B\017DashboardsProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n:google/cloud/monitoring_dashboard_v1/proto/dashboard.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a\x38google/cloud/monitoring_dashboard_v1/proto/layouts.proto"\xf1\x02\n\tDashboard\x12\x11\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x05\x12\x19\n\x0c\x64isplay_name\x18\x02 \x01(\tB\x03\xe0\x41\x02\x12\x0c\n\x04\x65tag\x18\x04 \x01(\t\x12\x41\n\x0bgrid_layout\x18\x05 \x01(\x0b\x32*.google.monitoring.dashboard.v1.GridLayoutH\x00\x12?\n\nrow_layout\x18\x08 \x01(\x0b\x32).google.monitoring.dashboard.v1.RowLayoutH\x00\x12\x45\n\rcolumn_layout\x18\t \x01(\x0b\x32,.google.monitoring.dashboard.v1.ColumnLayoutH\x00:S\xea\x41P\n#monitoring.googleapis.com/Dashboard\x12)projects/{project}/dashboards/{dashboard}B\x08\n\x06layoutB\xab\x01\n"com.google.monitoring.dashboard.v1B\x0f\x44\x61shboardsProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_layouts__pb2.DESCRIPTOR, - ], -) - - -_DASHBOARD = _descriptor.Descriptor( - name="Dashboard", - full_name="google.monitoring.dashboard.v1.Dashboard", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.dashboard.v1.Dashboard.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\005", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="display_name", - full_name="google.monitoring.dashboard.v1.Dashboard.display_name", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="etag", 
- full_name="google.monitoring.dashboard.v1.Dashboard.etag", - index=2, - number=4, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="grid_layout", - full_name="google.monitoring.dashboard.v1.Dashboard.grid_layout", - index=3, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="row_layout", - full_name="google.monitoring.dashboard.v1.Dashboard.row_layout", - index=4, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="column_layout", - full_name="google.monitoring.dashboard.v1.Dashboard.column_layout", - index=5, - number=9, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=b"\352AP\n#monitoring.googleapis.com/Dashboard\022)projects/{project}/dashboards/{dashboard}", - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="layout", - full_name="google.monitoring.dashboard.v1.Dashboard.layout", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=213, - serialized_end=582, -) - -_DASHBOARD.fields_by_name[ - "grid_layout" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_layouts__pb2._GRIDLAYOUT -) -_DASHBOARD.fields_by_name[ - "row_layout" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_layouts__pb2._ROWLAYOUT -) -_DASHBOARD.fields_by_name[ - "column_layout" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_layouts__pb2._COLUMNLAYOUT -) -_DASHBOARD.oneofs_by_name["layout"].fields.append( - _DASHBOARD.fields_by_name["grid_layout"] -) -_DASHBOARD.fields_by_name["grid_layout"].containing_oneof = _DASHBOARD.oneofs_by_name[ - "layout" -] -_DASHBOARD.oneofs_by_name["layout"].fields.append( - _DASHBOARD.fields_by_name["row_layout"] -) -_DASHBOARD.fields_by_name["row_layout"].containing_oneof = _DASHBOARD.oneofs_by_name[ - "layout" -] -_DASHBOARD.oneofs_by_name["layout"].fields.append( - _DASHBOARD.fields_by_name["column_layout"] -) -_DASHBOARD.fields_by_name["column_layout"].containing_oneof = _DASHBOARD.oneofs_by_name[ - "layout" -] -DESCRIPTOR.message_types_by_name["Dashboard"] = _DASHBOARD -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Dashboard = _reflection.GeneratedProtocolMessageType( - "Dashboard", - (_message.Message,), - { - "DESCRIPTOR": _DASHBOARD, - "__module__": 
"google.cloud.monitoring_dashboard.v1.proto.dashboard_pb2", - "__doc__": """A Google Stackdriver dashboard. Dashboards define the content and - layout of pages in the Stackdriver web application. - - Attributes: - name: - Immutable. The resource name of the dashboard. - display_name: - Required. The mutable, human-readable name. - etag: - \ ``etag`` is used for optimistic concurrency control as a way - to help prevent simultaneous updates of a policy from - overwriting each other. An ``etag`` is returned in the - response to ``GetDashboard``, and users are expected to put - that etag in the request to ``UpdateDashboard`` to ensure that - their change will be applied to the same version of the - Dashboard configuration. The field should not be passed during - dashboard creation. - layout: - A dashboard’s root container element that defines the layout - style. - grid_layout: - Content is arranged with a basic layout that re-flows a simple - list of informational elements like widgets or tiles. - row_layout: - The content is divided into equally spaced rows and the - widgets are arranged horizontally. - column_layout: - The content is divided into equally spaced columns and the - widgets are arranged vertically. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.Dashboard) - }, -) -_sym_db.RegisterMessage(Dashboard) - - -DESCRIPTOR._options = None -_DASHBOARD.fields_by_name["name"]._options = None -_DASHBOARD.fields_by_name["display_name"]._options = None -_DASHBOARD._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2.py deleted file mode 100644 index b586889..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2.py +++ /dev/null @@ -1,607 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring_dashboard_v1/proto/dashboards_service.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.api import resource_pb2 as google_dot_api_dot_resource__pb2 -from google.cloud.monitoring_dashboard.v1.proto import ( - dashboard_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_dashboard_v1/proto/dashboards_service.proto", - package="google.monitoring.dashboard.v1", - syntax="proto3", - serialized_options=b'\n"com.google.monitoring.dashboard.v1B\026DashboardsServiceProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', - create_key=_descriptor._internal_create_key, - serialized_pb=b'\nCgoogle/cloud/monitoring_dashboard_v1/proto/dashboards_service.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a:google/cloud/monitoring_dashboard_v1/proto/dashboard.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/api/client.proto"p\n\x16\x43reateDashboardRequest\x12\x13\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x41\n\tdashboard\x18\x02 \x01(\x0b\x32).google.monitoring.dashboard.v1.DashboardB\x03\xe0\x41\x02"\x83\x01\n\x15ListDashboardsRequest\x12\x43\n\x06parent\x18\x01 \x01(\tB3\xe0\x41\x02\xfa\x41-\n+cloudresourcemanager.googleapis.com/Project\x12\x11\n\tpage_size\x18\x02 \x01(\x05\x12\x12\n\npage_token\x18\x03 \x01(\t"p\n\x16ListDashboardsResponse\x12=\n\ndashboards\x18\x01 \x03(\x0b\x32).google.monitoring.dashboard.v1.Dashboard\x12\x17\n\x0fnext_page_token\x18\x02 \x01(\t"P\n\x13GetDashboardRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\n#monitoring.googleapis.com/Dashboard"S\n\x16\x44\x65leteDashboardRequest\x12\x39\n\x04name\x18\x01 \x01(\tB+\xe0\x41\x02\xfa\x41%\n#monitoring.googleapis.com/Dashboard"[\n\x16UpdateDashboardRequest\x12\x41\n\tdashboard\x18\x01 
\x01(\x0b\x32).google.monitoring.dashboard.v1.DashboardB\x03\xe0\x41\x02\x32\xb1\x08\n\x11\x44\x61shboardsService\x12\xab\x01\n\x0f\x43reateDashboard\x12\x36.google.monitoring.dashboard.v1.CreateDashboardRequest\x1a).google.monitoring.dashboard.v1.Dashboard"5\x82\xd3\xe4\x93\x02/""/v1/{parent=projects/*}/dashboards:\tdashboard\x12\xab\x01\n\x0eListDashboards\x12\x35.google.monitoring.dashboard.v1.ListDashboardsRequest\x1a\x36.google.monitoring.dashboard.v1.ListDashboardsResponse"*\x82\xd3\xe4\x93\x02$\x12"/v1/{parent=projects/*}/dashboards\x12\x9a\x01\n\x0cGetDashboard\x12\x33.google.monitoring.dashboard.v1.GetDashboardRequest\x1a).google.monitoring.dashboard.v1.Dashboard"*\x82\xd3\xe4\x93\x02$\x12"/v1/{name=projects/*/dashboards/*}\x12\x8d\x01\n\x0f\x44\x65leteDashboard\x12\x36.google.monitoring.dashboard.v1.DeleteDashboardRequest\x1a\x16.google.protobuf.Empty"*\x82\xd3\xe4\x93\x02$*"/v1/{name=projects/*/dashboards/*}\x12\xb5\x01\n\x0fUpdateDashboard\x12\x36.google.monitoring.dashboard.v1.UpdateDashboardRequest\x1a).google.monitoring.dashboard.v1.Dashboard"?\x82\xd3\xe4\x93\x02\x39\x32,/v1/{dashboard.name=projects/*/dashboards/*}:\tdashboard\x1a\xda\x01\xca\x41\x19monitoring.googleapis.com\xd2\x41\xba\x01https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.read,https://www.googleapis.com/auth/monitoring.writeB\xb2\x01\n"com.google.monitoring.dashboard.v1B\x16\x44\x61shboardsServiceProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', - dependencies=[ - google_dot_api_dot_annotations__pb2.DESCRIPTOR, - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_api_dot_resource__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR, - google_dot_api_dot_client__pb2.DESCRIPTOR, - ], -) - - -_CREATEDASHBOARDREQUEST = _descriptor.Descriptor( - name="CreateDashboardRequest", - full_name="google.monitoring.dashboard.v1.CreateDashboardRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.monitoring.dashboard.v1.CreateDashboardRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="dashboard", - full_name="google.monitoring.dashboard.v1.CreateDashboardRequest.dashboard", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=341, - serialized_end=453, -) - - -_LISTDASHBOARDSREQUEST = _descriptor.Descriptor( - name="ListDashboardsRequest", - 
full_name="google.monitoring.dashboard.v1.ListDashboardsRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="parent", - full_name="google.monitoring.dashboard.v1.ListDashboardsRequest.parent", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A-\n+cloudresourcemanager.googleapis.com/Project", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_size", - full_name="google.monitoring.dashboard.v1.ListDashboardsRequest.page_size", - index=1, - number=2, - type=5, - cpp_type=1, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="page_token", - full_name="google.monitoring.dashboard.v1.ListDashboardsRequest.page_token", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=456, - serialized_end=587, -) - - -_LISTDASHBOARDSRESPONSE = _descriptor.Descriptor( - name="ListDashboardsResponse", - full_name="google.monitoring.dashboard.v1.ListDashboardsResponse", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="dashboards", - full_name="google.monitoring.dashboard.v1.ListDashboardsResponse.dashboards", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="next_page_token", - full_name="google.monitoring.dashboard.v1.ListDashboardsResponse.next_page_token", - index=1, - number=2, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=589, - serialized_end=701, -) - - -_GETDASHBOARDREQUEST = _descriptor.Descriptor( - name="GetDashboardRequest", - full_name="google.monitoring.dashboard.v1.GetDashboardRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - 
full_name="google.monitoring.dashboard.v1.GetDashboardRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A%\n#monitoring.googleapis.com/Dashboard", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=703, - serialized_end=783, -) - - -_DELETEDASHBOARDREQUEST = _descriptor.Descriptor( - name="DeleteDashboardRequest", - full_name="google.monitoring.dashboard.v1.DeleteDashboardRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="name", - full_name="google.monitoring.dashboard.v1.DeleteDashboardRequest.name", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002\372A%\n#monitoring.googleapis.com/Dashboard", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=785, - serialized_end=868, -) - - -_UPDATEDASHBOARDREQUEST = _descriptor.Descriptor( - name="UpdateDashboardRequest", - full_name="google.monitoring.dashboard.v1.UpdateDashboardRequest", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="dashboard", - full_name="google.monitoring.dashboard.v1.UpdateDashboardRequest.dashboard", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=870, - serialized_end=961, -) - -_CREATEDASHBOARDREQUEST.fields_by_name[ - "dashboard" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD -) -_LISTDASHBOARDSRESPONSE.fields_by_name[ - "dashboards" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD -) -_UPDATEDASHBOARDREQUEST.fields_by_name[ - "dashboard" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD -) -DESCRIPTOR.message_types_by_name["CreateDashboardRequest"] = _CREATEDASHBOARDREQUEST -DESCRIPTOR.message_types_by_name["ListDashboardsRequest"] = _LISTDASHBOARDSREQUEST -DESCRIPTOR.message_types_by_name["ListDashboardsResponse"] = _LISTDASHBOARDSRESPONSE -DESCRIPTOR.message_types_by_name["GetDashboardRequest"] = _GETDASHBOARDREQUEST -DESCRIPTOR.message_types_by_name["DeleteDashboardRequest"] = _DELETEDASHBOARDREQUEST 
-DESCRIPTOR.message_types_by_name["UpdateDashboardRequest"] = _UPDATEDASHBOARDREQUEST -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -CreateDashboardRequest = _reflection.GeneratedProtocolMessageType( - "CreateDashboardRequest", - (_message.Message,), - { - "DESCRIPTOR": _CREATEDASHBOARDREQUEST, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.dashboards_service_pb2", - "__doc__": """The ``CreateDashboard`` request. - - Attributes: - parent: - Required. The project on which to execute the request. The - format is: :: projects/[PROJECT_ID_OR_NUMBER] The - ``[PROJECT_ID_OR_NUMBER]`` must match the dashboard resource - name. - dashboard: - Required. The initial dashboard specification. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.CreateDashboardRequest) - }, -) -_sym_db.RegisterMessage(CreateDashboardRequest) - -ListDashboardsRequest = _reflection.GeneratedProtocolMessageType( - "ListDashboardsRequest", - (_message.Message,), - { - "DESCRIPTOR": _LISTDASHBOARDSREQUEST, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.dashboards_service_pb2", - "__doc__": """The ``ListDashboards`` request. - - Attributes: - parent: - Required. The scope of the dashboards to list. The format is: - :: projects/[PROJECT_ID_OR_NUMBER] - page_size: - A positive number that is the maximum number of results to - return. If unspecified, a default of 1000 is used. - page_token: - If this field is not empty then it must contain the - ``nextPageToken`` value returned by a previous call to this - method. Using this field causes the method to return - additional results from the previous method call. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.ListDashboardsRequest) - }, -) -_sym_db.RegisterMessage(ListDashboardsRequest) - -ListDashboardsResponse = _reflection.GeneratedProtocolMessageType( - "ListDashboardsResponse", - (_message.Message,), - { - "DESCRIPTOR": _LISTDASHBOARDSRESPONSE, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.dashboards_service_pb2", - "__doc__": """The ``ListDashboards`` request. - - Attributes: - dashboards: - The list of requested dashboards. - next_page_token: - If there are more results than have been returned, then this - field is set to a non-empty value. To see the additional - results, use that value as ``page_token`` in the next call to - this method. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.ListDashboardsResponse) - }, -) -_sym_db.RegisterMessage(ListDashboardsResponse) - -GetDashboardRequest = _reflection.GeneratedProtocolMessageType( - "GetDashboardRequest", - (_message.Message,), - { - "DESCRIPTOR": _GETDASHBOARDREQUEST, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.dashboards_service_pb2", - "__doc__": """The ``GetDashboard`` request. - - Attributes: - name: - Required. The resource name of the Dashboard. The format is - one of: - ``dashboards/[DASHBOARD_ID]`` (for system - dashboards) - - ``projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID]`` - (for custom dashboards). - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.GetDashboardRequest) - }, -) -_sym_db.RegisterMessage(GetDashboardRequest) - -DeleteDashboardRequest = _reflection.GeneratedProtocolMessageType( - "DeleteDashboardRequest", - (_message.Message,), - { - "DESCRIPTOR": _DELETEDASHBOARDREQUEST, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.dashboards_service_pb2", - "__doc__": """The ``DeleteDashboard`` request. 
- - Attributes: - name: - Required. The resource name of the Dashboard. The format is: - :: - projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID] - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.DeleteDashboardRequest) - }, -) -_sym_db.RegisterMessage(DeleteDashboardRequest) - -UpdateDashboardRequest = _reflection.GeneratedProtocolMessageType( - "UpdateDashboardRequest", - (_message.Message,), - { - "DESCRIPTOR": _UPDATEDASHBOARDREQUEST, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.dashboards_service_pb2", - "__doc__": """The ``UpdateDashboard`` request. - - Attributes: - dashboard: - Required. The dashboard that will replace the existing - dashboard. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.UpdateDashboardRequest) - }, -) -_sym_db.RegisterMessage(UpdateDashboardRequest) - - -DESCRIPTOR._options = None -_CREATEDASHBOARDREQUEST.fields_by_name["parent"]._options = None -_CREATEDASHBOARDREQUEST.fields_by_name["dashboard"]._options = None -_LISTDASHBOARDSREQUEST.fields_by_name["parent"]._options = None -_GETDASHBOARDREQUEST.fields_by_name["name"]._options = None -_DELETEDASHBOARDREQUEST.fields_by_name["name"]._options = None -_UPDATEDASHBOARDREQUEST.fields_by_name["dashboard"]._options = None - -_DASHBOARDSSERVICE = _descriptor.ServiceDescriptor( - name="DashboardsService", - full_name="google.monitoring.dashboard.v1.DashboardsService", - file=DESCRIPTOR, - index=0, - serialized_options=b"\312A\031monitoring.googleapis.com\322A\272\001https://www.googleapis.com/auth/cloud-platform,https://www.googleapis.com/auth/monitoring,https://www.googleapis.com/auth/monitoring.read,https://www.googleapis.com/auth/monitoring.write", - create_key=_descriptor._internal_create_key, - serialized_start=964, - serialized_end=2037, - methods=[ - _descriptor.MethodDescriptor( - name="CreateDashboard", - full_name="google.monitoring.dashboard.v1.DashboardsService.CreateDashboard", - index=0, - containing_service=None, - input_type=_CREATEDASHBOARDREQUEST, - output_type=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD, - serialized_options=b'\202\323\344\223\002/""/v1/{parent=projects/*}/dashboards:\tdashboard', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="ListDashboards", - full_name="google.monitoring.dashboard.v1.DashboardsService.ListDashboards", - index=1, - containing_service=None, - input_type=_LISTDASHBOARDSREQUEST, - output_type=_LISTDASHBOARDSRESPONSE, - serialized_options=b'\202\323\344\223\002$\022"/v1/{parent=projects/*}/dashboards', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="GetDashboard", - full_name="google.monitoring.dashboard.v1.DashboardsService.GetDashboard", - index=2, - containing_service=None, - input_type=_GETDASHBOARDREQUEST, - output_type=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD, - serialized_options=b'\202\323\344\223\002$\022"/v1/{name=projects/*/dashboards/*}', - create_key=_descriptor._internal_create_key, - ), - _descriptor.MethodDescriptor( - name="DeleteDashboard", - full_name="google.monitoring.dashboard.v1.DashboardsService.DeleteDashboard", - index=3, - containing_service=None, - input_type=_DELETEDASHBOARDREQUEST, - output_type=google_dot_protobuf_dot_empty__pb2._EMPTY, - serialized_options=b'\202\323\344\223\002$*"/v1/{name=projects/*/dashboards/*}', - create_key=_descriptor._internal_create_key, - ), - 
_descriptor.MethodDescriptor( - name="UpdateDashboard", - full_name="google.monitoring.dashboard.v1.DashboardsService.UpdateDashboard", - index=4, - containing_service=None, - input_type=_UPDATEDASHBOARDREQUEST, - output_type=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2._DASHBOARD, - serialized_options=b"\202\323\344\223\00292,/v1/{dashboard.name=projects/*/dashboards/*}:\tdashboard", - create_key=_descriptor._internal_create_key, - ), - ], -) -_sym_db.RegisterServiceDescriptor(_DASHBOARDSSERVICE) - -DESCRIPTOR.services_by_name["DashboardsService"] = _DASHBOARDSSERVICE - -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2_grpc.py deleted file mode 100644 index 6034a8c..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/dashboards_service_pb2_grpc.py +++ /dev/null @@ -1,158 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! - -import grpc - -from google.cloud.monitoring_dashboard.v1.proto import ( - dashboard_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2, -) -from google.cloud.monitoring_dashboard.v1.proto import ( - dashboards_service_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -class DashboardsServiceStub(object): - """Manages Stackdriver dashboards. A dashboard is an arrangement of data display - widgets in a specific layout. - """ - - def __init__(self, channel): - """Constructor. - - Args: - channel: A grpc.Channel. 
- """ - self.CreateDashboard = channel.unary_unary( - "/google.monitoring.dashboard.v1.DashboardsService/CreateDashboard", - request_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.CreateDashboardRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.FromString, - ) - self.ListDashboards = channel.unary_unary( - "/google.monitoring.dashboard.v1.DashboardsService/ListDashboards", - request_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.ListDashboardsRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.ListDashboardsResponse.FromString, - ) - self.GetDashboard = channel.unary_unary( - "/google.monitoring.dashboard.v1.DashboardsService/GetDashboard", - request_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.GetDashboardRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.FromString, - ) - self.DeleteDashboard = channel.unary_unary( - "/google.monitoring.dashboard.v1.DashboardsService/DeleteDashboard", - request_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.DeleteDashboardRequest.SerializeToString, - response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, - ) - self.UpdateDashboard = channel.unary_unary( - "/google.monitoring.dashboard.v1.DashboardsService/UpdateDashboard", - request_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.UpdateDashboardRequest.SerializeToString, - response_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.FromString, - ) - - -class DashboardsServiceServicer(object): - """Manages Stackdriver dashboards. A dashboard is an arrangement of data display - widgets in a specific layout. - """ - - def CreateDashboard(self, request, context): - """Creates a new custom dashboard. - - This method requires the `monitoring.dashboards.create` permission - on the specified project. For more information, see - [Google Cloud IAM](https://cloud.google.com/iam). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def ListDashboards(self, request, context): - """Lists the existing dashboards. - - This method requires the `monitoring.dashboards.list` permission - on the specified project. For more information, see - [Google Cloud IAM](https://cloud.google.com/iam). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def GetDashboard(self, request, context): - """Fetches a specific dashboard. - - This method requires the `monitoring.dashboards.get` permission - on the specified dashboard. For more information, see - [Google Cloud IAM](https://cloud.google.com/iam). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def DeleteDashboard(self, request, context): - """Deletes an existing custom dashboard. - - This method requires the `monitoring.dashboards.delete` permission - on the specified dashboard. 
For more information, see - [Google Cloud IAM](https://cloud.google.com/iam). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - def UpdateDashboard(self, request, context): - """Replaces an existing custom dashboard with a new definition. - - This method requires the `monitoring.dashboards.update` permission - on the specified dashboard. For more information, see - [Google Cloud IAM](https://cloud.google.com/iam). - """ - context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details("Method not implemented!") - raise NotImplementedError("Method not implemented!") - - -def add_DashboardsServiceServicer_to_server(servicer, server): - rpc_method_handlers = { - "CreateDashboard": grpc.unary_unary_rpc_method_handler( - servicer.CreateDashboard, - request_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.CreateDashboardRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.SerializeToString, - ), - "ListDashboards": grpc.unary_unary_rpc_method_handler( - servicer.ListDashboards, - request_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.ListDashboardsRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.ListDashboardsResponse.SerializeToString, - ), - "GetDashboard": grpc.unary_unary_rpc_method_handler( - servicer.GetDashboard, - request_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.GetDashboardRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.SerializeToString, - ), - "DeleteDashboard": grpc.unary_unary_rpc_method_handler( - servicer.DeleteDashboard, - request_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.DeleteDashboardRequest.FromString, - response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, - ), - "UpdateDashboard": grpc.unary_unary_rpc_method_handler( - servicer.UpdateDashboard, - request_deserializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboards__service__pb2.UpdateDashboardRequest.FromString, - response_serializer=google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_dashboard__pb2.Dashboard.SerializeToString, - ), - } - generic_handler = grpc.method_handlers_generic_handler( - "google.monitoring.dashboard.v1.DashboardsService", rpc_method_handlers - ) - server.add_generic_rpc_handlers((generic_handler,)) diff --git a/google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2.py deleted file mode 100644 index db83727..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring_dashboard_v1/proto/drilldowns.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.cloud.monitoring_dashboard.v1.proto import ( - common_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_dashboard_v1/proto/drilldowns.proto", - package="google.monitoring.dashboard.v1", - syntax="proto3", - serialized_options=b'\n"com.google.monitoring.dashboard.v1B\017DrilldownsProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n;google/cloud/monitoring_dashboard_v1/proto/drilldowns.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x37google/cloud/monitoring_dashboard_v1/proto/common.protoB\xab\x01\n"com.google.monitoring.dashboard.v1B\x0f\x44rilldownsProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', - dependencies=[ - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2.DESCRIPTOR, - ], -) - - -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/layouts_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/layouts_pb2.py deleted file mode 100644 index 1bedef1..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/layouts_pb2.py +++ /dev/null @@ -1,434 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring_dashboard_v1/proto/layouts.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.cloud.monitoring_dashboard.v1.proto import ( - widget_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_widget__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_dashboard_v1/proto/layouts.proto", - package="google.monitoring.dashboard.v1", - syntax="proto3", - serialized_options=b'\n"com.google.monitoring.dashboard.v1B\014LayoutsProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n8google/cloud/monitoring_dashboard_v1/proto/layouts.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x37google/cloud/monitoring_dashboard_v1/proto/widget.proto"V\n\nGridLayout\x12\x0f\n\x07\x63olumns\x18\x01 \x01(\x03\x12\x37\n\x07widgets\x18\x02 \x03(\x0b\x32&.google.monitoring.dashboard.v1.Widget"\x98\x01\n\tRowLayout\x12;\n\x04rows\x18\x01 \x03(\x0b\x32-.google.monitoring.dashboard.v1.RowLayout.Row\x1aN\n\x03Row\x12\x0e\n\x06weight\x18\x01 \x01(\x03\x12\x37\n\x07widgets\x18\x02 \x03(\x0b\x32&.google.monitoring.dashboard.v1.Widget"\xa7\x01\n\x0c\x43olumnLayout\x12\x44\n\x07\x63olumns\x18\x01 \x03(\x0b\x32\x33.google.monitoring.dashboard.v1.ColumnLayout.Column\x1aQ\n\x06\x43olumn\x12\x0e\n\x06weight\x18\x01 \x01(\x03\x12\x37\n\x07widgets\x18\x02 \x03(\x0b\x32&.google.monitoring.dashboard.v1.WidgetB\xa8\x01\n"com.google.monitoring.dashboard.v1B\x0cLayoutsProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', - dependencies=[ - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_widget__pb2.DESCRIPTOR, - ], -) - - -_GRIDLAYOUT = _descriptor.Descriptor( - name="GridLayout", - full_name="google.monitoring.dashboard.v1.GridLayout", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="columns", - full_name="google.monitoring.dashboard.v1.GridLayout.columns", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="widgets", - full_name="google.monitoring.dashboard.v1.GridLayout.widgets", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=149, - serialized_end=235, -) - - -_ROWLAYOUT_ROW = _descriptor.Descriptor( - name="Row", - full_name="google.monitoring.dashboard.v1.RowLayout.Row", - filename=None, - 
file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="weight", - full_name="google.monitoring.dashboard.v1.RowLayout.Row.weight", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="widgets", - full_name="google.monitoring.dashboard.v1.RowLayout.Row.widgets", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=312, - serialized_end=390, -) - -_ROWLAYOUT = _descriptor.Descriptor( - name="RowLayout", - full_name="google.monitoring.dashboard.v1.RowLayout", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="rows", - full_name="google.monitoring.dashboard.v1.RowLayout.rows", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_ROWLAYOUT_ROW,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=238, - serialized_end=390, -) - - -_COLUMNLAYOUT_COLUMN = _descriptor.Descriptor( - name="Column", - full_name="google.monitoring.dashboard.v1.ColumnLayout.Column", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="weight", - full_name="google.monitoring.dashboard.v1.ColumnLayout.Column.weight", - index=0, - number=1, - type=3, - cpp_type=2, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="widgets", - full_name="google.monitoring.dashboard.v1.ColumnLayout.Column.widgets", - index=1, - number=2, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=479, - serialized_end=560, -) - -_COLUMNLAYOUT = _descriptor.Descriptor( - name="ColumnLayout", - full_name="google.monitoring.dashboard.v1.ColumnLayout", - filename=None, - file=DESCRIPTOR, - 
containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="columns", - full_name="google.monitoring.dashboard.v1.ColumnLayout.columns", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_COLUMNLAYOUT_COLUMN,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=393, - serialized_end=560, -) - -_GRIDLAYOUT.fields_by_name[ - "widgets" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_widget__pb2._WIDGET -) -_ROWLAYOUT_ROW.fields_by_name[ - "widgets" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_widget__pb2._WIDGET -) -_ROWLAYOUT_ROW.containing_type = _ROWLAYOUT -_ROWLAYOUT.fields_by_name["rows"].message_type = _ROWLAYOUT_ROW -_COLUMNLAYOUT_COLUMN.fields_by_name[ - "widgets" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_widget__pb2._WIDGET -) -_COLUMNLAYOUT_COLUMN.containing_type = _COLUMNLAYOUT -_COLUMNLAYOUT.fields_by_name["columns"].message_type = _COLUMNLAYOUT_COLUMN -DESCRIPTOR.message_types_by_name["GridLayout"] = _GRIDLAYOUT -DESCRIPTOR.message_types_by_name["RowLayout"] = _ROWLAYOUT -DESCRIPTOR.message_types_by_name["ColumnLayout"] = _COLUMNLAYOUT -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -GridLayout = _reflection.GeneratedProtocolMessageType( - "GridLayout", - (_message.Message,), - { - "DESCRIPTOR": _GRIDLAYOUT, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.layouts_pb2", - "__doc__": """A basic layout divides the available space into vertical columns of - equal width and arranges a list of widgets using a row-first strategy. - - Attributes: - columns: - The number of columns into which the view’s width is divided. - If omitted or set to zero, a system default will be used while - rendering. - widgets: - The informational elements that are arranged into the columns - row-first. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.GridLayout) - }, -) -_sym_db.RegisterMessage(GridLayout) - -RowLayout = _reflection.GeneratedProtocolMessageType( - "RowLayout", - (_message.Message,), - { - "Row": _reflection.GeneratedProtocolMessageType( - "Row", - (_message.Message,), - { - "DESCRIPTOR": _ROWLAYOUT_ROW, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.layouts_pb2", - "__doc__": """Defines the layout properties and content for a row. - - Attributes: - weight: - The relative weight of this row. The row weight is used to - adjust the height of rows on the screen (relative to peers). - Greater the weight, greater the height of the row on the - screen. If omitted, a value of 1 is used while rendering. - widgets: - The display widgets arranged horizontally in this row. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.RowLayout.Row) - }, - ), - "DESCRIPTOR": _ROWLAYOUT, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.layouts_pb2", - "__doc__": """A simplified layout that divides the available space into rows and - arranges a set of widgets horizontally in each row. - - Attributes: - rows: - The rows of content to display. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.RowLayout) - }, -) -_sym_db.RegisterMessage(RowLayout) -_sym_db.RegisterMessage(RowLayout.Row) - -ColumnLayout = _reflection.GeneratedProtocolMessageType( - "ColumnLayout", - (_message.Message,), - { - "Column": _reflection.GeneratedProtocolMessageType( - "Column", - (_message.Message,), - { - "DESCRIPTOR": _COLUMNLAYOUT_COLUMN, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.layouts_pb2", - "__doc__": """Defines the layout properties and content for a column. - - Attributes: - weight: - The relative weight of this column. The column weight is used - to adjust the width of columns on the screen (relative to - peers). Greater the weight, greater the width of the column on - the screen. If omitted, a value of 1 is used while rendering. - widgets: - The display widgets arranged vertically in this column. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.ColumnLayout.Column) - }, - ), - "DESCRIPTOR": _COLUMNLAYOUT, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.layouts_pb2", - "__doc__": """A simplified layout that divides the available space into vertical - columns and arranges a set of widgets vertically in each column. - - Attributes: - columns: - The columns of content to display. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.ColumnLayout) - }, -) -_sym_db.RegisterMessage(ColumnLayout) -_sym_db.RegisterMessage(ColumnLayout.Column) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2.py deleted file mode 100644 index 8e6a8c4..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2.py +++ /dev/null @@ -1,944 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring_dashboard_v1/proto/metrics.proto - -from google.protobuf.internal import enum_type_wrapper -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.cloud.monitoring_dashboard.v1.proto import ( - common_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2, -) - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_dashboard_v1/proto/metrics.proto", - package="google.monitoring.dashboard.v1", - syntax="proto3", - serialized_options=b'\n"com.google.monitoring.dashboard.v1B\014MetricsProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n8google/cloud/monitoring_dashboard_v1/proto/metrics.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x37google/cloud/monitoring_dashboard_v1/proto/common.proto"\x83\x02\n\x0fTimeSeriesQuery\x12N\n\x12time_series_filter\x18\x01 \x01(\x0b\x32\x30.google.monitoring.dashboard.v1.TimeSeriesFilterH\x00\x12Y\n\x18time_series_filter_ratio\x18\x02 \x01(\x0b\x32\x35.google.monitoring.dashboard.v1.TimeSeriesFilterRatioH\x00\x12$\n\x1atime_series_query_language\x18\x03 \x01(\tH\x00\x12\x15\n\runit_override\x18\x05 \x01(\tB\x08\n\x06source"\x8a\x03\n\x10TimeSeriesFilter\x12\x13\n\x06\x66ilter\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12@\n\x0b\x61ggregation\x18\x02 \x01(\x0b\x32+.google.monitoring.dashboard.v1.Aggregation\x12J\n\x15secondary_aggregation\x18\x03 \x01(\x0b\x32+.google.monitoring.dashboard.v1.Aggregation\x12W\n\x17pick_time_series_filter\x18\x04 \x01(\x0b\x32\x34.google.monitoring.dashboard.v1.PickTimeSeriesFilterH\x00\x12i\n\x1estatistical_time_series_filter\x18\x05 \x01(\x0b\x32;.google.monitoring.dashboard.v1.StatisticalTimeSeriesFilterB\x02\x18\x01H\x00\x42\x0f\n\routput_filter"\xc6\x04\n\x15TimeSeriesFilterRatio\x12R\n\tnumerator\x18\x01 \x01(\x0b\x32?.google.monitoring.dashboard.v1.TimeSeriesFilterRatio.RatioPart\x12T\n\x0b\x64\x65nominator\x18\x02 \x01(\x0b\x32?.google.monitoring.dashboard.v1.TimeSeriesFilterRatio.RatioPart\x12J\n\x15secondary_aggregation\x18\x03 \x01(\x0b\x32+.google.monitoring.dashboard.v1.Aggregation\x12W\n\x17pick_time_series_filter\x18\x04 \x01(\x0b\x32\x34.google.monitoring.dashboard.v1.PickTimeSeriesFilterH\x00\x12i\n\x1estatistical_time_series_filter\x18\x05 \x01(\x0b\x32;.google.monitoring.dashboard.v1.StatisticalTimeSeriesFilterB\x02\x18\x01H\x00\x1a\x62\n\tRatioPart\x12\x13\n\x06\x66ilter\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12@\n\x0b\x61ggregation\x18\x02 \x01(\x0b\x32+.google.monitoring.dashboard.v1.AggregationB\x0f\n\routput_filter"\xa4\x02\n\tThreshold\x12\r\n\x05label\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x01\x12>\n\x05\x63olor\x18\x03 \x01(\x0e\x32/.google.monitoring.dashboard.v1.Threshold.Color\x12\x46\n\tdirection\x18\x04 
\x01(\x0e\x32\x33.google.monitoring.dashboard.v1.Threshold.Direction"3\n\x05\x43olor\x12\x15\n\x11\x43OLOR_UNSPECIFIED\x10\x00\x12\n\n\x06YELLOW\x10\x04\x12\x07\n\x03RED\x10\x06"<\n\tDirection\x12\x19\n\x15\x44IRECTION_UNSPECIFIED\x10\x00\x12\t\n\x05\x41\x42OVE\x10\x01\x12\t\n\x05\x42\x45LOW\x10\x02*Q\n\x0eSparkChartType\x12 \n\x1cSPARK_CHART_TYPE_UNSPECIFIED\x10\x00\x12\x0e\n\nSPARK_LINE\x10\x01\x12\r\n\tSPARK_BAR\x10\x02\x42\xa8\x01\n"com.google.monitoring.dashboard.v1B\x0cMetricsProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2.DESCRIPTOR, - ], -) - -_SPARKCHARTTYPE = _descriptor.EnumDescriptor( - name="SparkChartType", - full_name="google.monitoring.dashboard.v1.SparkChartType", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="SPARK_CHART_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SPARK_LINE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="SPARK_BAR", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1721, - serialized_end=1802, -) -_sym_db.RegisterEnumDescriptor(_SPARKCHARTTYPE) - -SparkChartType = enum_type_wrapper.EnumTypeWrapper(_SPARKCHARTTYPE) -SPARK_CHART_TYPE_UNSPECIFIED = 0 -SPARK_LINE = 1 -SPARK_BAR = 2 - - -_THRESHOLD_COLOR = _descriptor.EnumDescriptor( - name="Color", - full_name="google.monitoring.dashboard.v1.Threshold.Color", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="COLOR_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="YELLOW", - index=1, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RED", - index=2, - number=6, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1606, - serialized_end=1657, -) -_sym_db.RegisterEnumDescriptor(_THRESHOLD_COLOR) - -_THRESHOLD_DIRECTION = _descriptor.EnumDescriptor( - name="Direction", - full_name="google.monitoring.dashboard.v1.Threshold.Direction", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="DIRECTION_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="ABOVE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="BELOW", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - 
serialized_start=1659, - serialized_end=1719, -) -_sym_db.RegisterEnumDescriptor(_THRESHOLD_DIRECTION) - - -_TIMESERIESQUERY = _descriptor.Descriptor( - name="TimeSeriesQuery", - full_name="google.monitoring.dashboard.v1.TimeSeriesQuery", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="time_series_filter", - full_name="google.monitoring.dashboard.v1.TimeSeriesQuery.time_series_filter", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="time_series_filter_ratio", - full_name="google.monitoring.dashboard.v1.TimeSeriesQuery.time_series_filter_ratio", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="time_series_query_language", - full_name="google.monitoring.dashboard.v1.TimeSeriesQuery.time_series_query_language", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="unit_override", - full_name="google.monitoring.dashboard.v1.TimeSeriesQuery.unit_override", - index=3, - number=5, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="source", - full_name="google.monitoring.dashboard.v1.TimeSeriesQuery.source", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=183, - serialized_end=442, -) - - -_TIMESERIESFILTER = _descriptor.Descriptor( - name="TimeSeriesFilter", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilter", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="filter", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilter.filter", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="aggregation", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilter.aggregation", - index=1, - number=2, - type=11, - cpp_type=10, - 
label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="secondary_aggregation", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilter.secondary_aggregation", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="pick_time_series_filter", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilter.pick_time_series_filter", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="statistical_time_series_filter", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilter.statistical_time_series_filter", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\030\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="output_filter", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilter.output_filter", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=445, - serialized_end=839, -) - - -_TIMESERIESFILTERRATIO_RATIOPART = _descriptor.Descriptor( - name="RatioPart", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilterRatio.RatioPart", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="filter", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilterRatio.RatioPart.filter", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="aggregation", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilterRatio.RatioPart.aggregation", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1309, - serialized_end=1407, 
-) - -_TIMESERIESFILTERRATIO = _descriptor.Descriptor( - name="TimeSeriesFilterRatio", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilterRatio", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="numerator", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilterRatio.numerator", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="denominator", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilterRatio.denominator", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="secondary_aggregation", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilterRatio.secondary_aggregation", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="pick_time_series_filter", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilterRatio.pick_time_series_filter", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="statistical_time_series_filter", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilterRatio.statistical_time_series_filter", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\030\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_TIMESERIESFILTERRATIO_RATIOPART,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="output_filter", - full_name="google.monitoring.dashboard.v1.TimeSeriesFilterRatio.output_filter", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=842, - serialized_end=1424, -) - - -_THRESHOLD = _descriptor.Descriptor( - name="Threshold", - full_name="google.monitoring.dashboard.v1.Threshold", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="label", - full_name="google.monitoring.dashboard.v1.Threshold.label", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - 
default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="value", - full_name="google.monitoring.dashboard.v1.Threshold.value", - index=1, - number=2, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="color", - full_name="google.monitoring.dashboard.v1.Threshold.color", - index=2, - number=3, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="direction", - full_name="google.monitoring.dashboard.v1.Threshold.direction", - index=3, - number=4, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_THRESHOLD_COLOR, _THRESHOLD_DIRECTION,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1427, - serialized_end=1719, -) - -_TIMESERIESQUERY.fields_by_name["time_series_filter"].message_type = _TIMESERIESFILTER -_TIMESERIESQUERY.fields_by_name[ - "time_series_filter_ratio" -].message_type = _TIMESERIESFILTERRATIO -_TIMESERIESQUERY.oneofs_by_name["source"].fields.append( - _TIMESERIESQUERY.fields_by_name["time_series_filter"] -) -_TIMESERIESQUERY.fields_by_name[ - "time_series_filter" -].containing_oneof = _TIMESERIESQUERY.oneofs_by_name["source"] -_TIMESERIESQUERY.oneofs_by_name["source"].fields.append( - _TIMESERIESQUERY.fields_by_name["time_series_filter_ratio"] -) -_TIMESERIESQUERY.fields_by_name[ - "time_series_filter_ratio" -].containing_oneof = _TIMESERIESQUERY.oneofs_by_name["source"] -_TIMESERIESQUERY.oneofs_by_name["source"].fields.append( - _TIMESERIESQUERY.fields_by_name["time_series_query_language"] -) -_TIMESERIESQUERY.fields_by_name[ - "time_series_query_language" -].containing_oneof = _TIMESERIESQUERY.oneofs_by_name["source"] -_TIMESERIESFILTER.fields_by_name[ - "aggregation" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._AGGREGATION -) -_TIMESERIESFILTER.fields_by_name[ - "secondary_aggregation" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._AGGREGATION -) -_TIMESERIESFILTER.fields_by_name[ - "pick_time_series_filter" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._PICKTIMESERIESFILTER -) -_TIMESERIESFILTER.fields_by_name[ - "statistical_time_series_filter" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._STATISTICALTIMESERIESFILTER -) -_TIMESERIESFILTER.oneofs_by_name["output_filter"].fields.append( - _TIMESERIESFILTER.fields_by_name["pick_time_series_filter"] -) 
-_TIMESERIESFILTER.fields_by_name[ - "pick_time_series_filter" -].containing_oneof = _TIMESERIESFILTER.oneofs_by_name["output_filter"] -_TIMESERIESFILTER.oneofs_by_name["output_filter"].fields.append( - _TIMESERIESFILTER.fields_by_name["statistical_time_series_filter"] -) -_TIMESERIESFILTER.fields_by_name[ - "statistical_time_series_filter" -].containing_oneof = _TIMESERIESFILTER.oneofs_by_name["output_filter"] -_TIMESERIESFILTERRATIO_RATIOPART.fields_by_name[ - "aggregation" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._AGGREGATION -) -_TIMESERIESFILTERRATIO_RATIOPART.containing_type = _TIMESERIESFILTERRATIO -_TIMESERIESFILTERRATIO.fields_by_name[ - "numerator" -].message_type = _TIMESERIESFILTERRATIO_RATIOPART -_TIMESERIESFILTERRATIO.fields_by_name[ - "denominator" -].message_type = _TIMESERIESFILTERRATIO_RATIOPART -_TIMESERIESFILTERRATIO.fields_by_name[ - "secondary_aggregation" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._AGGREGATION -) -_TIMESERIESFILTERRATIO.fields_by_name[ - "pick_time_series_filter" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._PICKTIMESERIESFILTER -) -_TIMESERIESFILTERRATIO.fields_by_name[ - "statistical_time_series_filter" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_common__pb2._STATISTICALTIMESERIESFILTER -) -_TIMESERIESFILTERRATIO.oneofs_by_name["output_filter"].fields.append( - _TIMESERIESFILTERRATIO.fields_by_name["pick_time_series_filter"] -) -_TIMESERIESFILTERRATIO.fields_by_name[ - "pick_time_series_filter" -].containing_oneof = _TIMESERIESFILTERRATIO.oneofs_by_name["output_filter"] -_TIMESERIESFILTERRATIO.oneofs_by_name["output_filter"].fields.append( - _TIMESERIESFILTERRATIO.fields_by_name["statistical_time_series_filter"] -) -_TIMESERIESFILTERRATIO.fields_by_name[ - "statistical_time_series_filter" -].containing_oneof = _TIMESERIESFILTERRATIO.oneofs_by_name["output_filter"] -_THRESHOLD.fields_by_name["color"].enum_type = _THRESHOLD_COLOR -_THRESHOLD.fields_by_name["direction"].enum_type = _THRESHOLD_DIRECTION -_THRESHOLD_COLOR.containing_type = _THRESHOLD -_THRESHOLD_DIRECTION.containing_type = _THRESHOLD -DESCRIPTOR.message_types_by_name["TimeSeriesQuery"] = _TIMESERIESQUERY -DESCRIPTOR.message_types_by_name["TimeSeriesFilter"] = _TIMESERIESFILTER -DESCRIPTOR.message_types_by_name["TimeSeriesFilterRatio"] = _TIMESERIESFILTERRATIO -DESCRIPTOR.message_types_by_name["Threshold"] = _THRESHOLD -DESCRIPTOR.enum_types_by_name["SparkChartType"] = _SPARKCHARTTYPE -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -TimeSeriesQuery = _reflection.GeneratedProtocolMessageType( - "TimeSeriesQuery", - (_message.Message,), - { - "DESCRIPTOR": _TIMESERIESQUERY, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.metrics_pb2", - "__doc__": """TimeSeriesQuery collects the set of supported methods for querying - time series data from the Stackdriver metrics API. - - Attributes: - source: - Parameters needed to obtain data for the chart. - time_series_filter: - Filter parameters to fetch time series. - time_series_filter_ratio: - Parameters to fetch a ratio between two time series filters. - time_series_query_language: - A query used to fetch time series. - unit_override: - The unit of data contained in fetched time series. If non- - empty, this unit will override any unit that accompanies - fetched data. The format is the same as the ```unit`` `__ field in ``MetricDescriptor``. 
- """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.TimeSeriesQuery) - }, -) -_sym_db.RegisterMessage(TimeSeriesQuery) - -TimeSeriesFilter = _reflection.GeneratedProtocolMessageType( - "TimeSeriesFilter", - (_message.Message,), - { - "DESCRIPTOR": _TIMESERIESFILTER, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.metrics_pb2", - "__doc__": """A filter that defines a subset of time series data that is displayed - in a widget. Time series data is fetched using the ```ListTimeSeries`` - `__ method. - - Attributes: - filter: - Required. The `monitoring filter - `__ that - identifies the metric types, resources, and projects to query. - aggregation: - By default, the raw time series data is returned. Use this - field to combine multiple time series for different views of - the data. - secondary_aggregation: - Apply a second aggregation after ``aggregation`` is applied. - output_filter: - Selects an optional time series filter. - pick_time_series_filter: - Ranking based time series filter. - statistical_time_series_filter: - Statistics based time series filter. Note: This field is - deprecated and completely ignored by the API. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.TimeSeriesFilter) - }, -) -_sym_db.RegisterMessage(TimeSeriesFilter) - -TimeSeriesFilterRatio = _reflection.GeneratedProtocolMessageType( - "TimeSeriesFilterRatio", - (_message.Message,), - { - "RatioPart": _reflection.GeneratedProtocolMessageType( - "RatioPart", - (_message.Message,), - { - "DESCRIPTOR": _TIMESERIESFILTERRATIO_RATIOPART, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.metrics_pb2", - "__doc__": """Describes a query to build the numerator or denominator of a - TimeSeriesFilterRatio. - - Attributes: - filter: - Required. The `monitoring filter - `__ that - identifies the metric types, resources, and projects to query. - aggregation: - By default, the raw time series data is returned. Use this - field to combine multiple time series for different views of - the data. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.TimeSeriesFilterRatio.RatioPart) - }, - ), - "DESCRIPTOR": _TIMESERIESFILTERRATIO, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.metrics_pb2", - "__doc__": """A pair of time series filters that define a ratio computation. The - output time series is the pair-wise division of each aligned element - from the numerator and denominator time series. - - Attributes: - numerator: - The numerator of the ratio. - denominator: - The denominator of the ratio. - secondary_aggregation: - Apply a second aggregation after the ratio is computed. - output_filter: - Selects an optional filter that is applied to the time series - after computing the ratio. - pick_time_series_filter: - Ranking based time series filter. - statistical_time_series_filter: - Statistics based time series filter. Note: This field is - deprecated and completely ignored by the API. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.TimeSeriesFilterRatio) - }, -) -_sym_db.RegisterMessage(TimeSeriesFilterRatio) -_sym_db.RegisterMessage(TimeSeriesFilterRatio.RatioPart) - -Threshold = _reflection.GeneratedProtocolMessageType( - "Threshold", - (_message.Message,), - { - "DESCRIPTOR": _THRESHOLD, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.metrics_pb2", - "__doc__": """Defines a threshold for categorizing time series values. - - Attributes: - label: - A label for the threshold. 
- value: - The value of the threshold. The value should be defined in the - native scale of the metric. - color: - The state color for this threshold. Color is not allowed in a - XyChart. - direction: - The direction for the current threshold. Direction is not - allowed in a XyChart. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.Threshold) - }, -) -_sym_db.RegisterMessage(Threshold) - - -DESCRIPTOR._options = None -_TIMESERIESFILTER.fields_by_name["filter"]._options = None -_TIMESERIESFILTER.fields_by_name["statistical_time_series_filter"]._options = None -_TIMESERIESFILTERRATIO_RATIOPART.fields_by_name["filter"]._options = None -_TIMESERIESFILTERRATIO.fields_by_name["statistical_time_series_filter"]._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2_grpc.py deleted file mode 100644 index b662812..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/metrics_pb2_grpc.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! - -import grpc diff --git a/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2.py deleted file mode 100644 index be5f60c..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2.py +++ /dev/null @@ -1,410 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring_dashboard_v1/proto/scorecard.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.cloud.monitoring_dashboard.v1.proto import ( - metrics_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_dashboard_v1/proto/scorecard.proto", - package="google.monitoring.dashboard.v1", - syntax="proto3", - serialized_options=b'\n"com.google.monitoring.dashboard.v1B\016ScorecardProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n:google/cloud/monitoring_dashboard_v1/proto/scorecard.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x38google/cloud/monitoring_dashboard_v1/proto/metrics.proto\x1a\x1egoogle/protobuf/duration.proto"\x9b\x04\n\tScorecard\x12O\n\x11time_series_query\x18\x01 \x01(\x0b\x32/.google.monitoring.dashboard.v1.TimeSeriesQueryB\x03\xe0\x41\x02\x12I\n\ngauge_view\x18\x04 \x01(\x0b\x32\x33.google.monitoring.dashboard.v1.Scorecard.GaugeViewH\x00\x12T\n\x10spark_chart_view\x18\x05 \x01(\x0b\x32\x38.google.monitoring.dashboard.v1.Scorecard.SparkChartViewH\x00\x12=\n\nthresholds\x18\x06 \x03(\x0b\x32).google.monitoring.dashboard.v1.Threshold\x1a\x35\n\tGaugeView\x12\x13\n\x0blower_bound\x18\x01 \x01(\x01\x12\x13\n\x0bupper_bound\x18\x02 \x01(\x01\x1a\x98\x01\n\x0eSparkChartView\x12M\n\x10spark_chart_type\x18\x01 \x01(\x0e\x32..google.monitoring.dashboard.v1.SparkChartTypeB\x03\xe0\x41\x02\x12\x37\n\x14min_alignment_period\x18\x02 \x01(\x0b\x32\x19.google.protobuf.DurationB\x0b\n\tdata_viewB\xaa\x01\n"com.google.monitoring.dashboard.v1B\x0eScorecardProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - ], -) - - -_SCORECARD_GAUGEVIEW = _descriptor.Descriptor( - name="GaugeView", - full_name="google.monitoring.dashboard.v1.Scorecard.GaugeView", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="lower_bound", - full_name="google.monitoring.dashboard.v1.Scorecard.GaugeView.lower_bound", - index=0, - number=1, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="upper_bound", - full_name="google.monitoring.dashboard.v1.Scorecard.GaugeView.upper_bound", - index=1, - number=2, - type=1, - cpp_type=5, - label=1, - has_default_value=False, - default_value=float(0), 
- message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=536, - serialized_end=589, -) - -_SCORECARD_SPARKCHARTVIEW = _descriptor.Descriptor( - name="SparkChartView", - full_name="google.monitoring.dashboard.v1.Scorecard.SparkChartView", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="spark_chart_type", - full_name="google.monitoring.dashboard.v1.Scorecard.SparkChartView.spark_chart_type", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="min_alignment_period", - full_name="google.monitoring.dashboard.v1.Scorecard.SparkChartView.min_alignment_period", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=592, - serialized_end=744, -) - -_SCORECARD = _descriptor.Descriptor( - name="Scorecard", - full_name="google.monitoring.dashboard.v1.Scorecard", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="time_series_query", - full_name="google.monitoring.dashboard.v1.Scorecard.time_series_query", - index=0, - number=1, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="gauge_view", - full_name="google.monitoring.dashboard.v1.Scorecard.gauge_view", - index=1, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="spark_chart_view", - full_name="google.monitoring.dashboard.v1.Scorecard.spark_chart_view", - index=2, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="thresholds", - full_name="google.monitoring.dashboard.v1.Scorecard.thresholds", - index=3, - number=6, - 
type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_SCORECARD_GAUGEVIEW, _SCORECARD_SPARKCHARTVIEW,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="data_view", - full_name="google.monitoring.dashboard.v1.Scorecard.data_view", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=218, - serialized_end=757, -) - -_SCORECARD_GAUGEVIEW.containing_type = _SCORECARD -_SCORECARD_SPARKCHARTVIEW.fields_by_name[ - "spark_chart_type" -].enum_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2._SPARKCHARTTYPE -) -_SCORECARD_SPARKCHARTVIEW.fields_by_name[ - "min_alignment_period" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_SCORECARD_SPARKCHARTVIEW.containing_type = _SCORECARD -_SCORECARD.fields_by_name[ - "time_series_query" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2._TIMESERIESQUERY -) -_SCORECARD.fields_by_name["gauge_view"].message_type = _SCORECARD_GAUGEVIEW -_SCORECARD.fields_by_name["spark_chart_view"].message_type = _SCORECARD_SPARKCHARTVIEW -_SCORECARD.fields_by_name[ - "thresholds" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2._THRESHOLD -) -_SCORECARD.oneofs_by_name["data_view"].fields.append( - _SCORECARD.fields_by_name["gauge_view"] -) -_SCORECARD.fields_by_name["gauge_view"].containing_oneof = _SCORECARD.oneofs_by_name[ - "data_view" -] -_SCORECARD.oneofs_by_name["data_view"].fields.append( - _SCORECARD.fields_by_name["spark_chart_view"] -) -_SCORECARD.fields_by_name[ - "spark_chart_view" -].containing_oneof = _SCORECARD.oneofs_by_name["data_view"] -DESCRIPTOR.message_types_by_name["Scorecard"] = _SCORECARD -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Scorecard = _reflection.GeneratedProtocolMessageType( - "Scorecard", - (_message.Message,), - { - "GaugeView": _reflection.GeneratedProtocolMessageType( - "GaugeView", - (_message.Message,), - { - "DESCRIPTOR": _SCORECARD_GAUGEVIEW, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.scorecard_pb2", - "__doc__": """A gauge chart shows where the current value sits within a pre-defined - range. The upper and lower bounds should define the possible range of - values for the scorecard’s query (inclusive). - - Attributes: - lower_bound: - The lower bound for this gauge chart. The value of the chart - should always be greater than or equal to this. - upper_bound: - The upper bound for this gauge chart. The value of the chart - should always be less than or equal to this. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.Scorecard.GaugeView) - }, - ), - "SparkChartView": _reflection.GeneratedProtocolMessageType( - "SparkChartView", - (_message.Message,), - { - "DESCRIPTOR": _SCORECARD_SPARKCHARTVIEW, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.scorecard_pb2", - "__doc__": """A sparkChart is a small chart suitable for inclusion in a table-cell - or inline in text. 
This message contains the configuration for a - sparkChart to show up on a Scorecard, showing recent trends of the - scorecard’s timeseries. - - Attributes: - spark_chart_type: - Required. The type of sparkchart to show in this chartView. - min_alignment_period: - The lower bound on data point frequency in the chart - implemented by specifying the minimum alignment period to use - in a time series query. For example, if the data is published - once every 10 minutes it would not make sense to fetch and - align data at one minute intervals. This field is optional and - exists only as a hint. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.Scorecard.SparkChartView) - }, - ), - "DESCRIPTOR": _SCORECARD, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.scorecard_pb2", - "__doc__": """A widget showing the latest value of a metric, and how this value - relates to one or more thresholds. - - Attributes: - time_series_query: - Required. Fields for querying time series data from the - Stackdriver metrics API. - data_view: - Defines the optional additional chart shown on the scorecard. - If neither is included - then a default scorecard is shown. - gauge_view: - Will cause the scorecard to show a gauge chart. - spark_chart_view: - Will cause the scorecard to show a spark chart. - thresholds: - The thresholds used to determine the state of the scorecard - given the time series’ current value. For an actual value x, - the scorecard is in a danger state if x is less than or equal - to a danger threshold that triggers below, or greater than or - equal to a danger threshold that triggers above. Similarly, if - x is above/below a warning threshold that triggers - above/below, then the scorecard is in a warning state - unless - x also puts it in a danger state. (Danger trumps warning.) As - an example, consider a scorecard with the following four - thresholds: { value: 90, category: ‘DANGER’, trigger: ‘ABOVE’, - }, { value: 70, category: ‘WARNING’, trigger: ‘ABOVE’, }, { - value: 10, category: ‘DANGER’, trigger: ‘BELOW’, }, { value: - 20, category: ‘WARNING’, trigger: ‘BELOW’, } Then: values - less than or equal to 10 would put the scorecard in a DANGER - state, values greater than 10 but less than or equal to 20 a - WARNING state, values strictly between 20 and 70 an OK state, - values greater than or equal to 70 but less than 90 a WARNING - state, and values greater than or equal to 90 a DANGER state. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.Scorecard) - }, -) -_sym_db.RegisterMessage(Scorecard) -_sym_db.RegisterMessage(Scorecard.GaugeView) -_sym_db.RegisterMessage(Scorecard.SparkChartView) - - -DESCRIPTOR._options = None -_SCORECARD_SPARKCHARTVIEW.fields_by_name["spark_chart_type"]._options = None -_SCORECARD.fields_by_name["time_series_query"]._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2_grpc.py deleted file mode 100644 index b662812..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/scorecard_pb2_grpc.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! - -import grpc diff --git a/google/cloud/monitoring_dashboard/v1/proto/service_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/service_pb2.py deleted file mode 100644 index 05d5e26..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/service_pb2.py +++ /dev/null @@ -1,44 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring_dashboard_v1/proto/service.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_dashboard_v1/proto/service.proto", - package="google.monitoring.dashboard.v1", - syntax="proto3", - serialized_options=b'\n"com.google.monitoring.dashboard.v1B\026ServiceMonitoringProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n8google/cloud/monitoring_dashboard_v1/proto/service.proto\x12\x1egoogle.monitoring.dashboard.v1B\xb2\x01\n"com.google.monitoring.dashboard.v1B\x16ServiceMonitoringProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', -) - - -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/service_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/service_pb2_grpc.py deleted file mode 100644 index b662812..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/service_pb2_grpc.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! - -import grpc diff --git a/google/cloud/monitoring_dashboard/v1/proto/text_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/text_pb2.py deleted file mode 100644 index d121080..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/text_pb2.py +++ /dev/null @@ -1,165 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring_dashboard_v1/proto/text.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_dashboard_v1/proto/text.proto", - package="google.monitoring.dashboard.v1", - syntax="proto3", - serialized_options=b'\n"com.google.monitoring.dashboard.v1B\tTextProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n5google/cloud/monitoring_dashboard_v1/proto/text.proto\x12\x1egoogle.monitoring.dashboard.v1"\x8d\x01\n\x04Text\x12\x0f\n\x07\x63ontent\x18\x01 \x01(\t\x12;\n\x06\x66ormat\x18\x02 \x01(\x0e\x32+.google.monitoring.dashboard.v1.Text.Format"7\n\x06\x46ormat\x12\x16\n\x12\x46ORMAT_UNSPECIFIED\x10\x00\x12\x0c\n\x08MARKDOWN\x10\x01\x12\x07\n\x03RAW\x10\x02\x42\xa5\x01\n"com.google.monitoring.dashboard.v1B\tTextProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', -) - - -_TEXT_FORMAT = _descriptor.EnumDescriptor( - name="Format", - full_name="google.monitoring.dashboard.v1.Text.Format", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="FORMAT_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="MARKDOWN", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="RAW", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=176, - serialized_end=231, -) -_sym_db.RegisterEnumDescriptor(_TEXT_FORMAT) - - -_TEXT = _descriptor.Descriptor( - name="Text", - full_name="google.monitoring.dashboard.v1.Text", - filename=None, - file=DESCRIPTOR, - containing_type=None, - 
create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="content", - full_name="google.monitoring.dashboard.v1.Text.content", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="format", - full_name="google.monitoring.dashboard.v1.Text.format", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_TEXT_FORMAT,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=90, - serialized_end=231, -) - -_TEXT.fields_by_name["format"].enum_type = _TEXT_FORMAT -_TEXT_FORMAT.containing_type = _TEXT -DESCRIPTOR.message_types_by_name["Text"] = _TEXT -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Text = _reflection.GeneratedProtocolMessageType( - "Text", - (_message.Message,), - { - "DESCRIPTOR": _TEXT, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.text_pb2", - "__doc__": """A widget that displays textual content. - - Attributes: - content: - The text content to be displayed. - format: - How the text content is formatted. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.Text) - }, -) -_sym_db.RegisterMessage(Text) - - -DESCRIPTOR._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/text_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/text_pb2_grpc.py deleted file mode 100644 index b662812..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/text_pb2_grpc.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! - -import grpc diff --git a/google/cloud/monitoring_dashboard/v1/proto/widget_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/widget_pb2.py deleted file mode 100644 index 881e922..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/widget_pb2.py +++ /dev/null @@ -1,243 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the protocol buffer compiler. DO NOT EDIT! -# source: google/cloud/monitoring_dashboard_v1/proto/widget.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.cloud.monitoring_dashboard.v1.proto import ( - scorecard_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_scorecard__pb2, -) -from google.cloud.monitoring_dashboard.v1.proto import ( - text_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_text__pb2, -) -from google.cloud.monitoring_dashboard.v1.proto import ( - xychart_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_xychart__pb2, -) -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_dashboard_v1/proto/widget.proto", - package="google.monitoring.dashboard.v1", - syntax="proto3", - serialized_options=b'\n"com.google.monitoring.dashboard.v1B\013WidgetProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', - create_key=_descriptor._internal_create_key, - serialized_pb=b"\n7google/cloud/monitoring_dashboard_v1/proto/widget.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a:google/cloud/monitoring_dashboard_v1/proto/scorecard.proto\x1a\x35google/cloud/monitoring_dashboard_v1/proto/text.proto\x1a\x38google/cloud/monitoring_dashboard_v1/proto/xychart.proto\x1a\x1bgoogle/protobuf/empty.proto\"\x83\x02\n\x06Widget\x12\x12\n\x05title\x18\x01 \x01(\tB\x03\xe0\x41\x01\x12;\n\x08xy_chart\x18\x02 \x01(\x0b\x32'.google.monitoring.dashboard.v1.XyChartH\x00\x12>\n\tscorecard\x18\x03 \x01(\x0b\x32).google.monitoring.dashboard.v1.ScorecardH\x00\x12\x34\n\x04text\x18\x04 \x01(\x0b\x32$.google.monitoring.dashboard.v1.TextH\x00\x12'\n\x05\x62lank\x18\x05 \x01(\x0b\x32\x16.google.protobuf.EmptyH\x00\x42\t\n\x07\x63ontentB\xa7\x01\n\"com.google.monitoring.dashboard.v1B\x0bWidgetProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3", - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_scorecard__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_text__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_xychart__pb2.DESCRIPTOR, - google_dot_protobuf_dot_empty__pb2.DESCRIPTOR, - ], -) - - -_WIDGET = _descriptor.Descriptor( - name="Widget", - full_name="google.monitoring.dashboard.v1.Widget", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - 
name="title", - full_name="google.monitoring.dashboard.v1.Widget.title", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="xy_chart", - full_name="google.monitoring.dashboard.v1.Widget.xy_chart", - index=1, - number=2, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="scorecard", - full_name="google.monitoring.dashboard.v1.Widget.scorecard", - index=2, - number=3, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="text", - full_name="google.monitoring.dashboard.v1.Widget.text", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="blank", - full_name="google.monitoring.dashboard.v1.Widget.blank", - index=4, - number=5, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[ - _descriptor.OneofDescriptor( - name="content", - full_name="google.monitoring.dashboard.v1.Widget.content", - index=0, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[], - ), - ], - serialized_start=327, - serialized_end=586, -) - -_WIDGET.fields_by_name[ - "xy_chart" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_xychart__pb2._XYCHART -) -_WIDGET.fields_by_name[ - "scorecard" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_scorecard__pb2._SCORECARD -) -_WIDGET.fields_by_name[ - "text" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_text__pb2._TEXT -) -_WIDGET.fields_by_name["blank"].message_type = google_dot_protobuf_dot_empty__pb2._EMPTY -_WIDGET.oneofs_by_name["content"].fields.append(_WIDGET.fields_by_name["xy_chart"]) -_WIDGET.fields_by_name["xy_chart"].containing_oneof = _WIDGET.oneofs_by_name["content"] -_WIDGET.oneofs_by_name["content"].fields.append(_WIDGET.fields_by_name["scorecard"]) -_WIDGET.fields_by_name["scorecard"].containing_oneof = _WIDGET.oneofs_by_name["content"] -_WIDGET.oneofs_by_name["content"].fields.append(_WIDGET.fields_by_name["text"]) -_WIDGET.fields_by_name["text"].containing_oneof = 
_WIDGET.oneofs_by_name["content"] -_WIDGET.oneofs_by_name["content"].fields.append(_WIDGET.fields_by_name["blank"]) -_WIDGET.fields_by_name["blank"].containing_oneof = _WIDGET.oneofs_by_name["content"] -DESCRIPTOR.message_types_by_name["Widget"] = _WIDGET -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -Widget = _reflection.GeneratedProtocolMessageType( - "Widget", - (_message.Message,), - { - "DESCRIPTOR": _WIDGET, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.widget_pb2", - "__doc__": """Widget contains a single dashboard component and configuration of how - to present the component in the dashboard. - - Attributes: - title: - Optional. The title of the widget. - content: - Content defines the component used to populate the widget. - xy_chart: - A chart of time series data. - scorecard: - A scorecard summarizing time series data. - text: - A raw string or markdown displaying textual content. - blank: - A blank space. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.Widget) - }, -) -_sym_db.RegisterMessage(Widget) - - -DESCRIPTOR._options = None -_WIDGET.fields_by_name["title"]._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/widget_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/widget_pb2_grpc.py deleted file mode 100644 index b662812..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/widget_pb2_grpc.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! - -import grpc diff --git a/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2.py b/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2.py deleted file mode 100644 index 1212df8..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2.py +++ /dev/null @@ -1,661 +0,0 @@ -# -*- coding: utf-8 -*- - -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the protocol buffer compiler. DO NOT EDIT! 
-# source: google/cloud/monitoring_dashboard_v1/proto/xychart.proto - -from google.protobuf import descriptor as _descriptor -from google.protobuf import message as _message -from google.protobuf import reflection as _reflection -from google.protobuf import symbol_database as _symbol_database - -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.cloud.monitoring_dashboard.v1.proto import ( - metrics_pb2 as google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2, -) -from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 - - -DESCRIPTOR = _descriptor.FileDescriptor( - name="google/cloud/monitoring_dashboard_v1/proto/xychart.proto", - package="google.monitoring.dashboard.v1", - syntax="proto3", - serialized_options=b'\n"com.google.monitoring.dashboard.v1B\014XyChartProtoP\001ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\352\002(Google::Cloud::Monitoring::Dashboard::V1', - create_key=_descriptor._internal_create_key, - serialized_pb=b'\n8google/cloud/monitoring_dashboard_v1/proto/xychart.proto\x12\x1egoogle.monitoring.dashboard.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x38google/cloud/monitoring_dashboard_v1/proto/metrics.proto\x1a\x1egoogle/protobuf/duration.proto"\xfd\x06\n\x07XyChart\x12G\n\tdata_sets\x18\x01 \x03(\x0b\x32/.google.monitoring.dashboard.v1.XyChart.DataSetB\x03\xe0\x41\x02\x12\x35\n\x12timeshift_duration\x18\x04 \x01(\x0b\x32\x19.google.protobuf.Duration\x12=\n\nthresholds\x18\x05 \x03(\x0b\x32).google.monitoring.dashboard.v1.Threshold\x12<\n\x06x_axis\x18\x06 \x01(\x0b\x32,.google.monitoring.dashboard.v1.XyChart.Axis\x12<\n\x06y_axis\x18\x07 \x01(\x0b\x32,.google.monitoring.dashboard.v1.XyChart.Axis\x12\x43\n\rchart_options\x18\x08 \x01(\x0b\x32,.google.monitoring.dashboard.v1.ChartOptions\x1a\xdf\x02\n\x07\x44\x61taSet\x12O\n\x11time_series_query\x18\x01 \x01(\x0b\x32/.google.monitoring.dashboard.v1.TimeSeriesQueryB\x03\xe0\x41\x02\x12K\n\tplot_type\x18\x02 \x01(\x0e\x32\x38.google.monitoring.dashboard.v1.XyChart.DataSet.PlotType\x12\x17\n\x0flegend_template\x18\x03 \x01(\t\x12<\n\x14min_alignment_period\x18\x04 \x01(\x0b\x32\x19.google.protobuf.DurationB\x03\xe0\x41\x01"_\n\x08PlotType\x12\x19\n\x15PLOT_TYPE_UNSPECIFIED\x10\x00\x12\x08\n\x04LINE\x10\x01\x12\x10\n\x0cSTACKED_AREA\x10\x02\x12\x0f\n\x0bSTACKED_BAR\x10\x03\x12\x0b\n\x07HEATMAP\x10\x04\x1a\x8f\x01\n\x04\x41xis\x12\r\n\x05label\x18\x01 \x01(\t\x12\x41\n\x05scale\x18\x02 \x01(\x0e\x32\x32.google.monitoring.dashboard.v1.XyChart.Axis.Scale"5\n\x05Scale\x12\x15\n\x11SCALE_UNSPECIFIED\x10\x00\x12\n\n\x06LINEAR\x10\x01\x12\t\n\x05LOG10\x10\x02"\x8e\x01\n\x0c\x43hartOptions\x12?\n\x04mode\x18\x01 \x01(\x0e\x32\x31.google.monitoring.dashboard.v1.ChartOptions.Mode"=\n\x04Mode\x12\x14\n\x10MODE_UNSPECIFIED\x10\x00\x12\t\n\x05\x43OLOR\x10\x01\x12\t\n\x05X_RAY\x10\x02\x12\t\n\x05STATS\x10\x03\x42\xa8\x01\n"com.google.monitoring.dashboard.v1B\x0cXyChartProtoP\x01ZGgoogle.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard\xea\x02(Google::Cloud::Monitoring::Dashboard::V1b\x06proto3', - dependencies=[ - google_dot_api_dot_field__behavior__pb2.DESCRIPTOR, - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2.DESCRIPTOR, - google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, - ], -) - - -_XYCHART_DATASET_PLOTTYPE = _descriptor.EnumDescriptor( - name="PlotType", - 
full_name="google.monitoring.dashboard.v1.XyChart.DataSet.PlotType", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="PLOT_TYPE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="LINE", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="STACKED_AREA", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="STACKED_BAR", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="HEATMAP", - index=4, - number=4, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=868, - serialized_end=963, -) -_sym_db.RegisterEnumDescriptor(_XYCHART_DATASET_PLOTTYPE) - -_XYCHART_AXIS_SCALE = _descriptor.EnumDescriptor( - name="Scale", - full_name="google.monitoring.dashboard.v1.XyChart.Axis.Scale", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="SCALE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="LINEAR", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="LOG10", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1056, - serialized_end=1109, -) -_sym_db.RegisterEnumDescriptor(_XYCHART_AXIS_SCALE) - -_CHARTOPTIONS_MODE = _descriptor.EnumDescriptor( - name="Mode", - full_name="google.monitoring.dashboard.v1.ChartOptions.Mode", - filename=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - values=[ - _descriptor.EnumValueDescriptor( - name="MODE_UNSPECIFIED", - index=0, - number=0, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="COLOR", - index=1, - number=1, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="X_RAY", - index=2, - number=2, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - _descriptor.EnumValueDescriptor( - name="STATS", - index=3, - number=3, - serialized_options=None, - type=None, - create_key=_descriptor._internal_create_key, - ), - ], - containing_type=None, - serialized_options=None, - serialized_start=1193, - serialized_end=1254, -) -_sym_db.RegisterEnumDescriptor(_CHARTOPTIONS_MODE) - - -_XYCHART_DATASET = _descriptor.Descriptor( - name="DataSet", - full_name="google.monitoring.dashboard.v1.XyChart.DataSet", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="time_series_query", - full_name="google.monitoring.dashboard.v1.XyChart.DataSet.time_series_query", - index=0, - number=1, - 
type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="plot_type", - full_name="google.monitoring.dashboard.v1.XyChart.DataSet.plot_type", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="legend_template", - full_name="google.monitoring.dashboard.v1.XyChart.DataSet.legend_template", - index=2, - number=3, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="min_alignment_period", - full_name="google.monitoring.dashboard.v1.XyChart.DataSet.min_alignment_period", - index=3, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\001", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_XYCHART_DATASET_PLOTTYPE,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=612, - serialized_end=963, -) - -_XYCHART_AXIS = _descriptor.Descriptor( - name="Axis", - full_name="google.monitoring.dashboard.v1.XyChart.Axis", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="label", - full_name="google.monitoring.dashboard.v1.XyChart.Axis.label", - index=0, - number=1, - type=9, - cpp_type=9, - label=1, - has_default_value=False, - default_value=b"".decode("utf-8"), - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="scale", - full_name="google.monitoring.dashboard.v1.XyChart.Axis.scale", - index=1, - number=2, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_XYCHART_AXIS_SCALE,], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=966, - serialized_end=1109, -) - -_XYCHART = _descriptor.Descriptor( - name="XyChart", - full_name="google.monitoring.dashboard.v1.XyChart", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="data_sets", - 
full_name="google.monitoring.dashboard.v1.XyChart.data_sets", - index=0, - number=1, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=b"\340A\002", - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="timeshift_duration", - full_name="google.monitoring.dashboard.v1.XyChart.timeshift_duration", - index=1, - number=4, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="thresholds", - full_name="google.monitoring.dashboard.v1.XyChart.thresholds", - index=2, - number=5, - type=11, - cpp_type=10, - label=3, - has_default_value=False, - default_value=[], - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="x_axis", - full_name="google.monitoring.dashboard.v1.XyChart.x_axis", - index=3, - number=6, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="y_axis", - full_name="google.monitoring.dashboard.v1.XyChart.y_axis", - index=4, - number=7, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - _descriptor.FieldDescriptor( - name="chart_options", - full_name="google.monitoring.dashboard.v1.XyChart.chart_options", - index=5, - number=8, - type=11, - cpp_type=10, - label=1, - has_default_value=False, - default_value=None, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[_XYCHART_DATASET, _XYCHART_AXIS,], - enum_types=[], - serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=216, - serialized_end=1109, -) - - -_CHARTOPTIONS = _descriptor.Descriptor( - name="ChartOptions", - full_name="google.monitoring.dashboard.v1.ChartOptions", - filename=None, - file=DESCRIPTOR, - containing_type=None, - create_key=_descriptor._internal_create_key, - fields=[ - _descriptor.FieldDescriptor( - name="mode", - full_name="google.monitoring.dashboard.v1.ChartOptions.mode", - index=0, - number=1, - type=14, - cpp_type=8, - label=1, - has_default_value=False, - default_value=0, - message_type=None, - enum_type=None, - containing_type=None, - is_extension=False, - extension_scope=None, - serialized_options=None, - file=DESCRIPTOR, - create_key=_descriptor._internal_create_key, - ), - ], - extensions=[], - nested_types=[], - enum_types=[_CHARTOPTIONS_MODE,], - 
serialized_options=None, - is_extendable=False, - syntax="proto3", - extension_ranges=[], - oneofs=[], - serialized_start=1112, - serialized_end=1254, -) - -_XYCHART_DATASET.fields_by_name[ - "time_series_query" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2._TIMESERIESQUERY -) -_XYCHART_DATASET.fields_by_name["plot_type"].enum_type = _XYCHART_DATASET_PLOTTYPE -_XYCHART_DATASET.fields_by_name[ - "min_alignment_period" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_XYCHART_DATASET.containing_type = _XYCHART -_XYCHART_DATASET_PLOTTYPE.containing_type = _XYCHART_DATASET -_XYCHART_AXIS.fields_by_name["scale"].enum_type = _XYCHART_AXIS_SCALE -_XYCHART_AXIS.containing_type = _XYCHART -_XYCHART_AXIS_SCALE.containing_type = _XYCHART_AXIS -_XYCHART.fields_by_name["data_sets"].message_type = _XYCHART_DATASET -_XYCHART.fields_by_name[ - "timeshift_duration" -].message_type = google_dot_protobuf_dot_duration__pb2._DURATION -_XYCHART.fields_by_name[ - "thresholds" -].message_type = ( - google_dot_cloud_dot_monitoring__dashboard__v1_dot_proto_dot_metrics__pb2._THRESHOLD -) -_XYCHART.fields_by_name["x_axis"].message_type = _XYCHART_AXIS -_XYCHART.fields_by_name["y_axis"].message_type = _XYCHART_AXIS -_XYCHART.fields_by_name["chart_options"].message_type = _CHARTOPTIONS -_CHARTOPTIONS.fields_by_name["mode"].enum_type = _CHARTOPTIONS_MODE -_CHARTOPTIONS_MODE.containing_type = _CHARTOPTIONS -DESCRIPTOR.message_types_by_name["XyChart"] = _XYCHART -DESCRIPTOR.message_types_by_name["ChartOptions"] = _CHARTOPTIONS -_sym_db.RegisterFileDescriptor(DESCRIPTOR) - -XyChart = _reflection.GeneratedProtocolMessageType( - "XyChart", - (_message.Message,), - { - "DataSet": _reflection.GeneratedProtocolMessageType( - "DataSet", - (_message.Message,), - { - "DESCRIPTOR": _XYCHART_DATASET, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.xychart_pb2", - "__doc__": """Groups a time series query definition with charting options. - - Attributes: - time_series_query: - Required. Fields for querying time series data from the - Stackdriver metrics API. - plot_type: - How this data should be plotted on the chart. - legend_template: - A template string for naming ``TimeSeries`` in the resulting - data set. This should be a string with interpolations of the - form ``${label_name}``, which will resolve to the label’s - value. - min_alignment_period: - Optional. The lower bound on data point frequency for this - data set, implemented by specifying the minimum alignment - period to use in a time series query For example, if the data - is published once every 10 minutes, the - ``min_alignment_period`` should be at least 10 minutes. It - would not make sense to fetch and align data at one minute - intervals. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.XyChart.DataSet) - }, - ), - "Axis": _reflection.GeneratedProtocolMessageType( - "Axis", - (_message.Message,), - { - "DESCRIPTOR": _XYCHART_AXIS, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.xychart_pb2", - "__doc__": """A chart axis. - - Attributes: - label: - The label of the axis. - scale: - The axis scale. By default, a linear scale is used. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.XyChart.Axis) - }, - ), - "DESCRIPTOR": _XYCHART, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.xychart_pb2", - "__doc__": """A chart that displays data on a 2D (X and Y axes) plane. - - Attributes: - data_sets: - Required. 
The data displayed in this chart. - timeshift_duration: - The duration used to display a comparison chart. A comparison - chart simultaneously shows values from two similar-length time - periods (e.g., week-over-week metrics). The duration must be - positive, and it can only be applied to charts with data sets - of LINE plot type. - thresholds: - Threshold lines drawn horizontally across the chart. - x_axis: - The properties applied to the X axis. - y_axis: - The properties applied to the Y axis. - chart_options: - Display options for the chart. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.XyChart) - }, -) -_sym_db.RegisterMessage(XyChart) -_sym_db.RegisterMessage(XyChart.DataSet) -_sym_db.RegisterMessage(XyChart.Axis) - -ChartOptions = _reflection.GeneratedProtocolMessageType( - "ChartOptions", - (_message.Message,), - { - "DESCRIPTOR": _CHARTOPTIONS, - "__module__": "google.cloud.monitoring_dashboard.v1.proto.xychart_pb2", - "__doc__": """Options to control visual rendering of a chart. - - Attributes: - mode: - The chart mode. - """, - # @@protoc_insertion_point(class_scope:google.monitoring.dashboard.v1.ChartOptions) - }, -) -_sym_db.RegisterMessage(ChartOptions) - - -DESCRIPTOR._options = None -_XYCHART_DATASET.fields_by_name["time_series_query"]._options = None -_XYCHART_DATASET.fields_by_name["min_alignment_period"]._options = None -_XYCHART.fields_by_name["data_sets"]._options = None -# @@protoc_insertion_point(module_scope) diff --git a/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2_grpc.py b/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2_grpc.py deleted file mode 100644 index b662812..0000000 --- a/google/cloud/monitoring_dashboard/v1/proto/xychart_pb2_grpc.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! - -import grpc diff --git a/google/cloud/monitoring_dashboard/v1/types.py b/google/cloud/monitoring_dashboard/v1/types.py deleted file mode 100644 index 3e20510..0000000 --- a/google/cloud/monitoring_dashboard/v1/types.py +++ /dev/null @@ -1,66 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- - -from __future__ import absolute_import -import sys - -from google.api_core.protobuf_helpers import get_messages - -from google.cloud.monitoring_dashboard.v1.proto import common_pb2 -from google.cloud.monitoring_dashboard.v1.proto import dashboard_pb2 -from google.cloud.monitoring_dashboard.v1.proto import dashboards_service_pb2 -from google.cloud.monitoring_dashboard.v1.proto import layouts_pb2 -from google.cloud.monitoring_dashboard.v1.proto import metrics_pb2 -from google.cloud.monitoring_dashboard.v1.proto import scorecard_pb2 -from google.cloud.monitoring_dashboard.v1.proto import text_pb2 -from google.cloud.monitoring_dashboard.v1.proto import widget_pb2 -from google.cloud.monitoring_dashboard.v1.proto import xychart_pb2 -from google.protobuf import duration_pb2 -from google.protobuf import empty_pb2 - - -_shared_modules = [ - duration_pb2, - empty_pb2, -] - -_local_modules = [ - common_pb2, - dashboard_pb2, - dashboards_service_pb2, - layouts_pb2, - metrics_pb2, - scorecard_pb2, - text_pb2, - widget_pb2, - xychart_pb2, -] - -names = [] - -for module in _shared_modules: # pragma: NO COVER - for name, message in get_messages(module).items(): - setattr(sys.modules[__name__], name, message) - names.append(name) -for module in _local_modules: - for name, message in get_messages(module).items(): - message.__module__ = "google.cloud.monitoring_dashboard.v1.types" - setattr(sys.modules[__name__], name, message) - names.append(name) - - -__all__ = tuple(sorted(names)) diff --git a/google/cloud/monitoring_dashboard_v1/__init__.py b/google/cloud/monitoring_dashboard_v1/__init__.py new file mode 100644 index 0000000..4c970ee --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/__init__.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.dashboards_service import DashboardsServiceClient +from .types.common import Aggregation +from .types.common import PickTimeSeriesFilter +from .types.common import StatisticalTimeSeriesFilter +from .types.dashboard import Dashboard +from .types.dashboards_service import CreateDashboardRequest +from .types.dashboards_service import DeleteDashboardRequest +from .types.dashboards_service import GetDashboardRequest +from .types.dashboards_service import ListDashboardsRequest +from .types.dashboards_service import ListDashboardsResponse +from .types.dashboards_service import UpdateDashboardRequest +from .types.layouts import ColumnLayout +from .types.layouts import GridLayout +from .types.layouts import RowLayout +from .types.metrics import SparkChartType +from .types.metrics import Threshold +from .types.metrics import TimeSeriesFilter +from .types.metrics import TimeSeriesFilterRatio +from .types.metrics import TimeSeriesQuery +from .types.scorecard import Scorecard +from .types.text import Text +from .types.widget import Widget +from .types.xychart import ChartOptions +from .types.xychart import XyChart + + +__all__ = ( + "Aggregation", + "ChartOptions", + "ColumnLayout", + "CreateDashboardRequest", + "Dashboard", + "DeleteDashboardRequest", + "GetDashboardRequest", + "GridLayout", + "ListDashboardsRequest", + "ListDashboardsResponse", + "PickTimeSeriesFilter", + "RowLayout", + "Scorecard", + "SparkChartType", + "StatisticalTimeSeriesFilter", + "Text", + "Threshold", + "TimeSeriesFilter", + "TimeSeriesFilterRatio", + "TimeSeriesQuery", + "UpdateDashboardRequest", + "Widget", + "XyChart", + "DashboardsServiceClient", +) diff --git a/google/cloud/monitoring_dashboard_v1/proto/common.proto b/google/cloud/monitoring_dashboard_v1/proto/common.proto new file mode 100644 index 0000000..6dedcf0 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/proto/common.proto @@ -0,0 +1,449 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/distribution.proto"; +import "google/protobuf/duration.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "CommonProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// Describes how to combine multiple time series to provide a different view of +// the data. Aggregation of time series is done in two steps. First, each time +// series in the set is _aligned_ to the same time interval boundaries, then the +// set of time series is optionally _reduced_ in number. +// +// Alignment consists of applying the `per_series_aligner` operation +// to each time series after its data has been divided into regular +// `alignment_period` time intervals. 
This process takes _all_ of the data +// points in an alignment period, applies a mathematical transformation such as +// averaging, minimum, maximum, delta, etc., and converts them into a single +// data point per period. +// +// Reduction is when the aligned and transformed time series can optionally be +// combined, reducing the number of time series through similar mathematical +// transformations. Reduction involves applying a `cross_series_reducer` to +// all the time series, optionally sorting the time series into subsets with +// `group_by_fields`, and applying the reducer to each subset. +// +// The raw time series data can contain a huge amount of information from +// multiple sources. Alignment and reduction transforms this mass of data into +// a more manageable and representative collection of data, for example "the +// 95% latency across the average of all tasks in a cluster". This +// representative data can be more easily graphed and comprehended, and the +// individual time series data is still available for later drilldown. For more +// details, see [Filtering and +// aggregation](https://cloud.google.com/monitoring/api/v3/aggregation). +message Aggregation { + // The `Aligner` specifies the operation that will be applied to the data + // points in each alignment period in a time series. Except for + // `ALIGN_NONE`, which specifies that no operation be applied, each alignment + // operation replaces the set of data values in each alignment period with + // a single value: the result of applying the operation to the data values. + // An aligned time series has a single data value at the end of each + // `alignment_period`. + // + // An alignment operation can change the data type of the values, too. For + // example, if you apply a counting operation to boolean values, the data + // `value_type` in the original time series is `BOOLEAN`, but the `value_type` + // in the aligned result is `INT64`. + enum Aligner { + // No alignment. Raw data is returned. Not valid if cross-series reduction + // is requested. The `value_type` of the result is the same as the + // `value_type` of the input. + ALIGN_NONE = 0; + + // Align and convert to + // [DELTA][google.api.MetricDescriptor.MetricKind.DELTA]. + // The output is `delta = y1 - y0`. + // + // This alignment is valid for + // [CUMULATIVE][google.api.MetricDescriptor.MetricKind.CUMULATIVE] and + // `DELTA` metrics. If the selected alignment period results in periods + // with no data, then the aligned value for such a period is created by + // interpolation. The `value_type` of the aligned result is the same as + // the `value_type` of the input. + ALIGN_DELTA = 1; + + // Align and convert to a rate. The result is computed as + // `rate = (y1 - y0)/(t1 - t0)`, or "delta over time". + // Think of this aligner as providing the slope of the line that passes + // through the value at the start and at the end of the `alignment_period`. + // + // This aligner is valid for `CUMULATIVE` + // and `DELTA` metrics with numeric values. If the selected alignment + // period results in periods with no data, then the aligned value for + // such a period is created by interpolation. The output is a `GAUGE` + // metric with `value_type` `DOUBLE`. + // + // If, by "rate", you mean "percentage change", see the + // `ALIGN_PERCENT_CHANGE` aligner instead. + ALIGN_RATE = 2; + + // Align by interpolating between adjacent points around the alignment + // period boundary. This aligner is valid for `GAUGE` metrics with + // numeric values. 
The `value_type` of the aligned result is the same as the + // `value_type` of the input. + ALIGN_INTERPOLATE = 3; + + // Align by moving the most recent data point before the end of the + // alignment period to the boundary at the end of the alignment + // period. This aligner is valid for `GAUGE` metrics. The `value_type` of + // the aligned result is the same as the `value_type` of the input. + ALIGN_NEXT_OLDER = 4; + + // Align the time series by returning the minimum value in each alignment + // period. This aligner is valid for `GAUGE` and `DELTA` metrics with + // numeric values. The `value_type` of the aligned result is the same as + // the `value_type` of the input. + ALIGN_MIN = 10; + + // Align the time series by returning the maximum value in each alignment + // period. This aligner is valid for `GAUGE` and `DELTA` metrics with + // numeric values. The `value_type` of the aligned result is the same as + // the `value_type` of the input. + ALIGN_MAX = 11; + + // Align the time series by returning the mean value in each alignment + // period. This aligner is valid for `GAUGE` and `DELTA` metrics with + // numeric values. The `value_type` of the aligned result is `DOUBLE`. + ALIGN_MEAN = 12; + + // Align the time series by returning the number of values in each alignment + // period. This aligner is valid for `GAUGE` and `DELTA` metrics with + // numeric or Boolean values. The `value_type` of the aligned result is + // `INT64`. + ALIGN_COUNT = 13; + + // Align the time series by returning the sum of the values in each + // alignment period. This aligner is valid for `GAUGE` and `DELTA` + // metrics with numeric and distribution values. The `value_type` of the + // aligned result is the same as the `value_type` of the input. + ALIGN_SUM = 14; + + // Align the time series by returning the standard deviation of the values + // in each alignment period. This aligner is valid for `GAUGE` and + // `DELTA` metrics with numeric values. The `value_type` of the output is + // `DOUBLE`. + ALIGN_STDDEV = 15; + + // Align the time series by returning the number of `True` values in + // each alignment period. This aligner is valid for `GAUGE` metrics with + // Boolean values. The `value_type` of the output is `INT64`. + ALIGN_COUNT_TRUE = 16; + + // Align the time series by returning the number of `False` values in + // each alignment period. This aligner is valid for `GAUGE` metrics with + // Boolean values. The `value_type` of the output is `INT64`. + ALIGN_COUNT_FALSE = 24; + + // Align the time series by returning the ratio of the number of `True` + // values to the total number of values in each alignment period. This + // aligner is valid for `GAUGE` metrics with Boolean values. The output + // value is in the range [0.0, 1.0] and has `value_type` `DOUBLE`. + ALIGN_FRACTION_TRUE = 17; + + // Align the time series by using [percentile + // aggregation](https://en.wikipedia.org/wiki/Percentile). The resulting + // data point in each alignment period is the 99th percentile of all data + // points in the period. This aligner is valid for `GAUGE` and `DELTA` + // metrics with distribution values. The output is a `GAUGE` metric with + // `value_type` `DOUBLE`. + ALIGN_PERCENTILE_99 = 18; + + // Align the time series by using [percentile + // aggregation](https://en.wikipedia.org/wiki/Percentile). The resulting + // data point in each alignment period is the 95th percentile of all data + // points in the period. This aligner is valid for `GAUGE` and `DELTA` + // metrics with distribution values. 
The output is a `GAUGE` metric with + // `value_type` `DOUBLE`. + ALIGN_PERCENTILE_95 = 19; + + // Align the time series by using [percentile + // aggregation](https://en.wikipedia.org/wiki/Percentile). The resulting + // data point in each alignment period is the 50th percentile of all data + // points in the period. This aligner is valid for `GAUGE` and `DELTA` + // metrics with distribution values. The output is a `GAUGE` metric with + // `value_type` `DOUBLE`. + ALIGN_PERCENTILE_50 = 20; + + // Align the time series by using [percentile + // aggregation](https://en.wikipedia.org/wiki/Percentile). The resulting + // data point in each alignment period is the 5th percentile of all data + // points in the period. This aligner is valid for `GAUGE` and `DELTA` + // metrics with distribution values. The output is a `GAUGE` metric with + // `value_type` `DOUBLE`. + ALIGN_PERCENTILE_05 = 21; + + // Align and convert to a percentage change. This aligner is valid for + // `GAUGE` and `DELTA` metrics with numeric values. This alignment returns + // `((current - previous)/previous) * 100`, where the value of `previous` is + // determined based on the `alignment_period`. + // + // If the values of `current` and `previous` are both 0, then the returned + // value is 0. If only `previous` is 0, the returned value is infinity. + // + // A 10-minute moving mean is computed at each point of the alignment period + // prior to the above calculation to smooth the metric and prevent false + // positives from very short-lived spikes. The moving mean is only + // applicable for data whose values are `>= 0`. Any values `< 0` are + // treated as a missing datapoint, and are ignored. While `DELTA` + // metrics are accepted by this alignment, special care should be taken that + // the values for the metric will always be positive. The output is a + // `GAUGE` metric with `value_type` `DOUBLE`. + ALIGN_PERCENT_CHANGE = 23; + } + + // A Reducer operation describes how to aggregate data points from multiple + // time series into a single time series, where the value of each data point + // in the resulting series is a function of all the already aligned values in + // the input time series. + enum Reducer { + // No cross-time series reduction. The output of the `Aligner` is + // returned. + REDUCE_NONE = 0; + + // Reduce by computing the mean value across time series for each + // alignment period. This reducer is valid for + // [DELTA][google.api.MetricDescriptor.MetricKind.DELTA] and + // [GAUGE][google.api.MetricDescriptor.MetricKind.GAUGE] metrics with + // numeric or distribution values. The `value_type` of the output is + // [DOUBLE][google.api.MetricDescriptor.ValueType.DOUBLE]. + REDUCE_MEAN = 1; + + // Reduce by computing the minimum value across time series for each + // alignment period. This reducer is valid for `DELTA` and `GAUGE` metrics + // with numeric values. The `value_type` of the output is the same as the + // `value_type` of the input. + REDUCE_MIN = 2; + + // Reduce by computing the maximum value across time series for each + // alignment period. This reducer is valid for `DELTA` and `GAUGE` metrics + // with numeric values. The `value_type` of the output is the same as the + // `value_type` of the input. + REDUCE_MAX = 3; + + // Reduce by computing the sum across time series for each + // alignment period. This reducer is valid for `DELTA` and `GAUGE` metrics + // with numeric and distribution values. The `value_type` of the output is + // the same as the `value_type` of the input. 
+ REDUCE_SUM = 4; + + // Reduce by computing the standard deviation across time series + // for each alignment period. This reducer is valid for `DELTA` and + // `GAUGE` metrics with numeric or distribution values. The `value_type` + // of the output is `DOUBLE`. + REDUCE_STDDEV = 5; + + // Reduce by computing the number of data points across time series + // for each alignment period. This reducer is valid for `DELTA` and + // `GAUGE` metrics of numeric, Boolean, distribution, and string + // `value_type`. The `value_type` of the output is `INT64`. + REDUCE_COUNT = 6; + + // Reduce by computing the number of `True`-valued data points across time + // series for each alignment period. This reducer is valid for `DELTA` and + // `GAUGE` metrics of Boolean `value_type`. The `value_type` of the output + // is `INT64`. + REDUCE_COUNT_TRUE = 7; + + // Reduce by computing the number of `False`-valued data points across time + // series for each alignment period. This reducer is valid for `DELTA` and + // `GAUGE` metrics of Boolean `value_type`. The `value_type` of the output + // is `INT64`. + REDUCE_COUNT_FALSE = 15; + + // Reduce by computing the ratio of the number of `True`-valued data points + // to the total number of data points for each alignment period. This + // reducer is valid for `DELTA` and `GAUGE` metrics of Boolean `value_type`. + // The output value is in the range [0.0, 1.0] and has `value_type` + // `DOUBLE`. + REDUCE_FRACTION_TRUE = 8; + + // Reduce by computing the [99th + // percentile](https://en.wikipedia.org/wiki/Percentile) of data points + // across time series for each alignment period. This reducer is valid for + // `GAUGE` and `DELTA` metrics of numeric and distribution type. The value + // of the output is `DOUBLE`. + REDUCE_PERCENTILE_99 = 9; + + // Reduce by computing the [95th + // percentile](https://en.wikipedia.org/wiki/Percentile) of data points + // across time series for each alignment period. This reducer is valid for + // `GAUGE` and `DELTA` metrics of numeric and distribution type. The value + // of the output is `DOUBLE`. + REDUCE_PERCENTILE_95 = 10; + + // Reduce by computing the [50th + // percentile](https://en.wikipedia.org/wiki/Percentile) of data points + // across time series for each alignment period. This reducer is valid for + // `GAUGE` and `DELTA` metrics of numeric and distribution type. The value + // of the output is `DOUBLE`. + REDUCE_PERCENTILE_50 = 11; + + // Reduce by computing the [5th + // percentile](https://en.wikipedia.org/wiki/Percentile) of data points + // across time series for each alignment period. This reducer is valid for + // `GAUGE` and `DELTA` metrics of numeric and distribution type. The value + // of the output is `DOUBLE`. + REDUCE_PERCENTILE_05 = 12; + } + + // The `alignment_period` specifies a time interval, in seconds, that is used + // to divide the data in all the + // [time series][google.monitoring.v3.TimeSeries] into consistent blocks of + // time. This will be done before the per-series aligner can be applied to + // the data. + // + // The value must be at least 60 seconds. If a per-series aligner other than + // `ALIGN_NONE` is specified, this field is required or an error is returned. + // If no per-series aligner is specified, or the aligner `ALIGN_NONE` is + // specified, then this field is ignored. + google.protobuf.Duration alignment_period = 1; + + // An `Aligner` describes how to bring the data points in a single + // time series into temporal alignment. 
Except for `ALIGN_NONE`, all + // alignments cause all the data points in an `alignment_period` to be + // mathematically grouped together, resulting in a single data point for + // each `alignment_period` with end timestamp at the end of the period. + // + // Not all alignment operations may be applied to all time series. The valid + // choices depend on the `metric_kind` and `value_type` of the original time + // series. Alignment can change the `metric_kind` or the `value_type` of + // the time series. + // + // Time series data must be aligned in order to perform cross-time + // series reduction. If `cross_series_reducer` is specified, then + // `per_series_aligner` must be specified and not equal to `ALIGN_NONE` + // and `alignment_period` must be specified; otherwise, an error is + // returned. + Aligner per_series_aligner = 2; + + // The reduction operation to be used to combine time series into a single + // time series, where the value of each data point in the resulting series is + // a function of all the already aligned values in the input time series. + // + // Not all reducer operations can be applied to all time series. The valid + // choices depend on the `metric_kind` and the `value_type` of the original + // time series. Reduction can yield a time series with a different + // `metric_kind` or `value_type` than the input time series. + // + // Time series data must first be aligned (see `per_series_aligner`) in order + // to perform cross-time series reduction. If `cross_series_reducer` is + // specified, then `per_series_aligner` must be specified, and must not be + // `ALIGN_NONE`. An `alignment_period` must also be specified; otherwise, an + // error is returned. + Reducer cross_series_reducer = 4; + + // The set of fields to preserve when `cross_series_reducer` is + // specified. The `group_by_fields` determine how the time series are + // partitioned into subsets prior to applying the aggregation + // operation. Each subset contains time series that have the same + // value for each of the grouping fields. Each individual time + // series is a member of exactly one subset. The + // `cross_series_reducer` is applied to each subset of time series. + // It is not possible to reduce across different resource types, so + // this field implicitly contains `resource.type`. Fields not + // specified in `group_by_fields` are aggregated away. If + // `group_by_fields` is not specified and all the time series have + // the same resource type, then the time series are aggregated into + // a single output time series. If `cross_series_reducer` is not + // defined, this field is ignored. + repeated string group_by_fields = 5; +} + +// Describes a ranking-based time series filter. Each input time series is +// ranked with an aligner. The filter will allow up to `num_time_series` time +// series to pass through it, selecting them based on the relative ranking. +// +// For example, if `ranking_method` is `METHOD_MEAN`,`direction` is `BOTTOM`, +// and `num_time_series` is 3, then the 3 times series with the lowest mean +// values will pass through the filter. +message PickTimeSeriesFilter { + // The value reducers that can be applied to a `PickTimeSeriesFilter`. + enum Method { + // Not allowed. You must specify a different `Method` if you specify a + // `PickTimeSeriesFilter`. + METHOD_UNSPECIFIED = 0; + + // Select the mean of all values. + METHOD_MEAN = 1; + + // Select the maximum value. + METHOD_MAX = 2; + + // Select the minimum value. 
+ METHOD_MIN = 3; + + // Compute the sum of all values. + METHOD_SUM = 4; + + // Select the most recent value. + METHOD_LATEST = 5; + } + + // Describes the ranking directions. + enum Direction { + // Not allowed. You must specify a different `Direction` if you specify a + // `PickTimeSeriesFilter`. + DIRECTION_UNSPECIFIED = 0; + + // Pass the highest `num_time_series` ranking inputs. + TOP = 1; + + // Pass the lowest `num_time_series` ranking inputs. + BOTTOM = 2; + } + + // `ranking_method` is applied to each time series independently to produce + // the value which will be used to compare the time series to other time + // series. + Method ranking_method = 1; + + // How many time series to allow to pass through the filter. + int32 num_time_series = 2; + + // How to use the ranking to select time series that pass through the filter. + Direction direction = 3; +} + +// A filter that ranks streams based on their statistical relation to other +// streams in a request. +// Note: This field is deprecated and completely ignored by the API. +message StatisticalTimeSeriesFilter { + // The filter methods that can be applied to a stream. + enum Method { + // Not allowed in well-formed requests. + METHOD_UNSPECIFIED = 0; + + // Compute the outlier score of each stream. + METHOD_CLUSTER_OUTLIER = 1; + } + + // `rankingMethod` is applied to a set of time series, and then the produced + // value for each individual time series is used to compare a given time + // series to others. + // These are methods that cannot be applied stream-by-stream, but rather + // require the full context of a request to evaluate time series. + Method ranking_method = 1; + + // How many time series to output. + int32 num_time_series = 2; +} diff --git a/google/cloud/monitoring_dashboard_v1/proto/dashboard.proto b/google/cloud/monitoring_dashboard_v1/proto/dashboard.proto new file mode 100644 index 0000000..7a25776 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/proto/dashboard.proto @@ -0,0 +1,66 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/monitoring/dashboard/v1/layouts.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "DashboardsProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// A Google Stackdriver dashboard. Dashboards define the content and layout +// of pages in the Stackdriver web application. +message Dashboard { + option (google.api.resource) = { + type: "monitoring.googleapis.com/Dashboard" + pattern: "projects/{project}/dashboards/{dashboard}" + }; + + // Immutable. The resource name of the dashboard. + string name = 1 [(google.api.field_behavior) = IMMUTABLE]; + + // Required. 
The mutable, human-readable name. + string display_name = 2 [(google.api.field_behavior) = REQUIRED]; + + // `etag` is used for optimistic concurrency control as a way to help + // prevent simultaneous updates of a policy from overwriting each other. + // An `etag` is returned in the response to `GetDashboard`, and + // users are expected to put that etag in the request to `UpdateDashboard` to + // ensure that their change will be applied to the same version of the + // Dashboard configuration. The field should not be passed during + // dashboard creation. + string etag = 4; + + // A dashboard's root container element that defines the layout style. + oneof layout { + // Content is arranged with a basic layout that re-flows a simple list of + // informational elements like widgets or tiles. + GridLayout grid_layout = 5; + + // The content is divided into equally spaced rows and the widgets are + // arranged horizontally. + RowLayout row_layout = 8; + + // The content is divided into equally spaced columns and the widgets are + // arranged vertically. + ColumnLayout column_layout = 9; + } +} diff --git a/google/cloud/monitoring_dashboard_v1/proto/dashboards_service.proto b/google/cloud/monitoring_dashboard_v1/proto/dashboards_service.proto new file mode 100644 index 0000000..a7cbef5 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/proto/dashboards_service.proto @@ -0,0 +1,179 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/annotations.proto"; +import "google/api/field_behavior.proto"; +import "google/api/resource.proto"; +import "google/monitoring/dashboard/v1/dashboard.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/field_mask.proto"; +import "google/api/client.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "DashboardsServiceProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// Manages Stackdriver dashboards. A dashboard is an arrangement of data display +// widgets in a specific layout. +service DashboardsService { + option (google.api.default_host) = "monitoring.googleapis.com"; + option (google.api.oauth_scopes) = + "https://www.googleapis.com/auth/cloud-platform," + "https://www.googleapis.com/auth/monitoring," + "https://www.googleapis.com/auth/monitoring.read," + "https://www.googleapis.com/auth/monitoring.write"; + + // Creates a new custom dashboard. + // + // This method requires the `monitoring.dashboards.create` permission + // on the specified project. For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). 
+ rpc CreateDashboard(CreateDashboardRequest) returns (Dashboard) { + option (google.api.http) = { + post: "/v1/{parent=projects/*}/dashboards" + body: "dashboard" + }; + } + + // Lists the existing dashboards. + // + // This method requires the `monitoring.dashboards.list` permission + // on the specified project. For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + rpc ListDashboards(ListDashboardsRequest) returns (ListDashboardsResponse) { + option (google.api.http) = { + get: "/v1/{parent=projects/*}/dashboards" + }; + } + + // Fetches a specific dashboard. + // + // This method requires the `monitoring.dashboards.get` permission + // on the specified dashboard. For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + rpc GetDashboard(GetDashboardRequest) returns (Dashboard) { + option (google.api.http) = { + get: "/v1/{name=projects/*/dashboards/*}" + }; + } + + // Deletes an existing custom dashboard. + // + // This method requires the `monitoring.dashboards.delete` permission + // on the specified dashboard. For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + rpc DeleteDashboard(DeleteDashboardRequest) returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=projects/*/dashboards/*}" + }; + } + + // Replaces an existing custom dashboard with a new definition. + // + // This method requires the `monitoring.dashboards.update` permission + // on the specified dashboard. For more information, see + // [Google Cloud IAM](https://cloud.google.com/iam). + rpc UpdateDashboard(UpdateDashboardRequest) returns (Dashboard) { + option (google.api.http) = { + patch: "/v1/{dashboard.name=projects/*/dashboards/*}" + body: "dashboard" + }; + } +} + +// The `CreateDashboard` request. +message CreateDashboardRequest { + // Required. The project on which to execute the request. The format is: + // + // projects/[PROJECT_ID_OR_NUMBER] + // + // The `[PROJECT_ID_OR_NUMBER]` must match the dashboard resource name. + string parent = 1 [(google.api.field_behavior) = REQUIRED]; + + // Required. The initial dashboard specification. + Dashboard dashboard = 2 [(google.api.field_behavior) = REQUIRED]; +} + +// The `ListDashboards` request. +message ListDashboardsRequest { + // Required. The scope of the dashboards to list. The format is: + // + // projects/[PROJECT_ID_OR_NUMBER] + string parent = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "cloudresourcemanager.googleapis.com/Project" + } + ]; + + // A positive number that is the maximum number of results to return. + // If unspecified, a default of 1000 is used. + int32 page_size = 2; + + // If this field is not empty then it must contain the `nextPageToken` value + // returned by a previous call to this method. Using this field causes the + // method to return additional results from the previous method call. + string page_token = 3; +} + +// The `ListDashboards` request. +message ListDashboardsResponse { + // The list of requested dashboards. + repeated Dashboard dashboards = 1; + + // If there are more results than have been returned, then this field is set + // to a non-empty value. To see the additional results, + // use that value as `page_token` in the next call to this method. + string next_page_token = 2; +} + +// The `GetDashboard` request. +message GetDashboardRequest { + // Required. The resource name of the Dashboard. 
The format is one of: + // + // - `dashboards/[DASHBOARD_ID]` (for system dashboards) + // - `projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID]` + // (for custom dashboards). + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "monitoring.googleapis.com/Dashboard" + } + ]; +} + +// The `DeleteDashboard` request. +message DeleteDashboardRequest { + // Required. The resource name of the Dashboard. The format is: + // + // projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID] + string name = 1 [ + (google.api.field_behavior) = REQUIRED, + (google.api.resource_reference) = { + type: "monitoring.googleapis.com/Dashboard" + } + ]; +} + +// The `UpdateDashboard` request. +message UpdateDashboardRequest { + // Required. The dashboard that will replace the existing dashboard. + Dashboard dashboard = 1 [(google.api.field_behavior) = REQUIRED]; +} diff --git a/google/cloud/monitoring_dashboard_v1/proto/drilldowns.proto b/google/cloud/monitoring_dashboard_v1/proto/drilldowns.proto new file mode 100644 index 0000000..0080df5 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/proto/drilldowns.proto @@ -0,0 +1,25 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/monitoring/dashboard/v1/common.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "DrilldownsProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; diff --git a/google/cloud/monitoring_dashboard_v1/proto/layouts.proto b/google/cloud/monitoring_dashboard_v1/proto/layouts.proto new file mode 100644 index 0000000..acc0517 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/proto/layouts.proto @@ -0,0 +1,74 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
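An aside on the pagination fields just added in dashboards_service.proto: `page_size`, `page_token`, and `next_page_token` follow the usual list-method contract. Below is a minimal sketch of driving that loop by hand with the new request type; it assumes the generated `DashboardsServiceClient` (added elsewhere in this change) exposes a `list_dashboards` method for the `ListDashboards` RPC, and the project path is a placeholder. In practice the generated client typically wraps the response in a pager that performs this loop for you.

```py
from google.cloud import monitoring_dashboard_v1

client = monitoring_dashboard_v1.DashboardsServiceClient()

# Placeholder project path; substitute a real PROJECT_ID_OR_NUMBER.
parent = "projects/my-project"

page_token = ""
while True:
    request = monitoring_dashboard_v1.ListDashboardsRequest(
        parent=parent,
        page_size=100,        # at most 100 dashboards per page
        page_token=page_token,
    )
    response = client.list_dashboards(request=request)
    for dashboard in response.dashboards:
        print(dashboard.name, dashboard.display_name)
    # An empty next_page_token means there are no further pages.
    page_token = response.next_page_token
    if not page_token:
        break
```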
+ +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/monitoring/dashboard/v1/widget.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "LayoutsProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// A basic layout divides the available space into vertical columns of equal +// width and arranges a list of widgets using a row-first strategy. +message GridLayout { + // The number of columns into which the view's width is divided. If omitted + // or set to zero, a system default will be used while rendering. + int64 columns = 1; + + // The informational elements that are arranged into the columns row-first. + repeated Widget widgets = 2; +} + +// A simplified layout that divides the available space into rows +// and arranges a set of widgets horizontally in each row. +message RowLayout { + // Defines the layout properties and content for a row. + message Row { + // The relative weight of this row. The row weight is used to adjust the + // height of rows on the screen (relative to peers). Greater the weight, + // greater the height of the row on the screen. If omitted, a value + // of 1 is used while rendering. + int64 weight = 1; + + // The display widgets arranged horizontally in this row. + repeated Widget widgets = 2; + } + + // The rows of content to display. + repeated Row rows = 1; +} + +// A simplified layout that divides the available space into vertical columns +// and arranges a set of widgets vertically in each column. +message ColumnLayout { + // Defines the layout properties and content for a column. + message Column { + // The relative weight of this column. The column weight is used to adjust + // the width of columns on the screen (relative to peers). + // Greater the weight, greater the width of the column on the screen. + // If omitted, a value of 1 is used while rendering. + int64 weight = 1; + + // The display widgets arranged vertically in this column. + repeated Widget widgets = 2; + } + + // The columns of content to display. + repeated Column columns = 1; +} diff --git a/google/cloud/monitoring_dashboard_v1/proto/metrics.proto b/google/cloud/monitoring_dashboard_v1/proto/metrics.proto new file mode 100644 index 0000000..2fff1d2 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/proto/metrics.proto @@ -0,0 +1,174 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
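The layouts.proto messages above (`GridLayout`, `RowLayout`, `ColumnLayout`) are the mutually exclusive members of the `Dashboard.layout` oneof. A minimal sketch of a weighted two-row layout using the new proto-plus types, assuming the field names shown in the proto carry over unchanged to `monitoring_dashboard_v1` (the `Text` widget used here is defined in text.proto further down in this change):

```py
from google.cloud import monitoring_dashboard_v1

# Two rows: the first takes roughly twice the height of the second.
layout = monitoring_dashboard_v1.RowLayout(
    rows=[
        monitoring_dashboard_v1.RowLayout.Row(
            weight=2,
            widgets=[
                monitoring_dashboard_v1.Widget(
                    title="Overview",
                    text=monitoring_dashboard_v1.Text(content="# Service health"),
                ),
            ],
        ),
        monitoring_dashboard_v1.RowLayout.Row(
            weight=1,
            widgets=[monitoring_dashboard_v1.Widget(title="Notes")],
        ),
    ]
)

dashboard = monitoring_dashboard_v1.Dashboard(
    display_name="Example dashboard",
    row_layout=layout,  # exactly one of grid_layout / row_layout / column_layout
)
```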
+ +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/field_behavior.proto"; +import "google/monitoring/dashboard/v1/common.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "MetricsProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// TimeSeriesQuery collects the set of supported methods for querying time +// series data from the Stackdriver metrics API. +message TimeSeriesQuery { + // Parameters needed to obtain data for the chart. + oneof source { + // Filter parameters to fetch time series. + TimeSeriesFilter time_series_filter = 1; + + // Parameters to fetch a ratio between two time series filters. + TimeSeriesFilterRatio time_series_filter_ratio = 2; + + // A query used to fetch time series. + string time_series_query_language = 3; + } + + // The unit of data contained in fetched time series. If non-empty, this + // unit will override any unit that accompanies fetched data. The format is + // the same as the + // [`unit`](https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.metricDescriptors) + // field in `MetricDescriptor`. + string unit_override = 5; +} + +// A filter that defines a subset of time series data that is displayed in a +// widget. Time series data is fetched using the +// [`ListTimeSeries`](https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list) +// method. +message TimeSeriesFilter { + // Required. The [monitoring filter](https://cloud.google.com/monitoring/api/v3/filters) + // that identifies the metric types, resources, and projects to query. + string filter = 1 [(google.api.field_behavior) = REQUIRED]; + + // By default, the raw time series data is returned. + // Use this field to combine multiple time series for different views of the + // data. + Aggregation aggregation = 2; + + // Apply a second aggregation after `aggregation` is applied. + Aggregation secondary_aggregation = 3; + + // Selects an optional time series filter. + oneof output_filter { + // Ranking based time series filter. + PickTimeSeriesFilter pick_time_series_filter = 4; + + // Statistics based time series filter. + // Note: This field is deprecated and completely ignored by the API. + StatisticalTimeSeriesFilter statistical_time_series_filter = 5 [deprecated = true]; + } +} + +// A pair of time series filters that define a ratio computation. The output +// time series is the pair-wise division of each aligned element from the +// numerator and denominator time series. +message TimeSeriesFilterRatio { + // Describes a query to build the numerator or denominator of a + // TimeSeriesFilterRatio. + message RatioPart { + // Required. The [monitoring + // filter](https://cloud.google.com/monitoring/api/v3/filters) that + // identifies the metric types, resources, and projects to query. + string filter = 1 [(google.api.field_behavior) = REQUIRED]; + + // By default, the raw time series data is returned. + // Use this field to combine multiple time series for different views of the + // data. + Aggregation aggregation = 2; + } + + // The numerator of the ratio. + RatioPart numerator = 1; + + // The denominator of the ratio. + RatioPart denominator = 2; + + // Apply a second aggregation after the ratio is computed. 
+ Aggregation secondary_aggregation = 3; + + // Selects an optional filter that is applied to the time series after + // computing the ratio. + oneof output_filter { + // Ranking based time series filter. + PickTimeSeriesFilter pick_time_series_filter = 4; + + // Statistics based time series filter. + // Note: This field is deprecated and completely ignored by the API. + StatisticalTimeSeriesFilter statistical_time_series_filter = 5 [deprecated = true]; + } +} + +// Defines a threshold for categorizing time series values. +message Threshold { + // The color suggests an interpretation to the viewer when actual values cross + // the threshold. Comments on each color provide UX guidance on how users can + // be expected to interpret a given state color. + enum Color { + // Color is unspecified. Not allowed in well-formed requests. + COLOR_UNSPECIFIED = 0; + + // Crossing the threshold is "concerning" behavior. + YELLOW = 4; + + // Crossing the threshold is "emergency" behavior. + RED = 6; + } + + // Whether the threshold is considered crossed by an actual value above or + // below its threshold value. + enum Direction { + // Not allowed in well-formed requests. + DIRECTION_UNSPECIFIED = 0; + + // The threshold will be considered crossed if the actual value is above + // the threshold value. + ABOVE = 1; + + // The threshold will be considered crossed if the actual value is below + // the threshold value. + BELOW = 2; + } + + // A label for the threshold. + string label = 1; + + // The value of the threshold. The value should be defined in the native scale + // of the metric. + double value = 2; + + // The state color for this threshold. Color is not allowed in a XyChart. + Color color = 3; + + // The direction for the current threshold. Direction is not allowed in a + // XyChart. + Direction direction = 4; +} + +// Defines the possible types of spark chart supported by the `Scorecard`. +enum SparkChartType { + // Not allowed in well-formed requests. + SPARK_CHART_TYPE_UNSPECIFIED = 0; + + // The sparkline will be rendered as a small line chart. + SPARK_LINE = 1; + + // The sparkbar will be rendered as a small bar chart. + SPARK_BAR = 2; +} diff --git a/google/cloud/monitoring_dashboard_v1/proto/scorecard.proto b/google/cloud/monitoring_dashboard_v1/proto/scorecard.proto new file mode 100644 index 0000000..1820c03 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/proto/scorecard.proto @@ -0,0 +1,111 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
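metrics.proto above supplies the query-side building blocks (`TimeSeriesQuery`, `TimeSeriesFilter`, `Threshold`, plus `Aggregation` from common.proto). A minimal sketch of a mean-aligned, zone-grouped filter query, assuming these proto field and enum names map directly onto the generated types; the metric type and group-by label are illustrative values:

```py
from google.cloud import monitoring_dashboard_v1
from google.protobuf import duration_pb2

# Align each series to 60-second buckets with ALIGN_MEAN, then take the
# cross-series mean grouped by zone.
aggregation = monitoring_dashboard_v1.Aggregation(
    alignment_period=duration_pb2.Duration(seconds=60),
    per_series_aligner=monitoring_dashboard_v1.Aggregation.Aligner.ALIGN_MEAN,
    cross_series_reducer=monitoring_dashboard_v1.Aggregation.Reducer.REDUCE_MEAN,
    group_by_fields=["resource.label.zone"],
)

query = monitoring_dashboard_v1.TimeSeriesQuery(
    time_series_filter=monitoring_dashboard_v1.TimeSeriesFilter(
        filter='metric.type="compute.googleapis.com/instance/cpu/utilization"',
        aggregation=aggregation,
    ),
    unit_override="1",  # optional; overrides the unit reported with the fetched data
)
```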
+ +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/field_behavior.proto"; +import "google/monitoring/dashboard/v1/metrics.proto"; +import "google/protobuf/duration.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "ScorecardProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// A widget showing the latest value of a metric, and how this value relates to +// one or more thresholds. +message Scorecard { + // A gauge chart shows where the current value sits within a pre-defined + // range. The upper and lower bounds should define the possible range of + // values for the scorecard's query (inclusive). + message GaugeView { + // The lower bound for this gauge chart. The value of the chart should + // always be greater than or equal to this. + double lower_bound = 1; + + // The upper bound for this gauge chart. The value of the chart should + // always be less than or equal to this. + double upper_bound = 2; + } + + // A sparkChart is a small chart suitable for inclusion in a table-cell or + // inline in text. This message contains the configuration for a sparkChart + // to show up on a Scorecard, showing recent trends of the scorecard's + // timeseries. + message SparkChartView { + // Required. The type of sparkchart to show in this chartView. + SparkChartType spark_chart_type = 1 [(google.api.field_behavior) = REQUIRED]; + + // The lower bound on data point frequency in the chart implemented by + // specifying the minimum alignment period to use in a time series query. + // For example, if the data is published once every 10 minutes it would not + // make sense to fetch and align data at one minute intervals. This field is + // optional and exists only as a hint. + google.protobuf.Duration min_alignment_period = 2; + } + + // Required. Fields for querying time series data from the + // Stackdriver metrics API. + TimeSeriesQuery time_series_query = 1 [(google.api.field_behavior) = REQUIRED]; + + // Defines the optional additional chart shown on the scorecard. If + // neither is included - then a default scorecard is shown. + oneof data_view { + // Will cause the scorecard to show a gauge chart. + GaugeView gauge_view = 4; + + // Will cause the scorecard to show a spark chart. + SparkChartView spark_chart_view = 5; + } + + // The thresholds used to determine the state of the scorecard given the + // time series' current value. For an actual value x, the scorecard is in a + // danger state if x is less than or equal to a danger threshold that triggers + // below, or greater than or equal to a danger threshold that triggers above. + // Similarly, if x is above/below a warning threshold that triggers + // above/below, then the scorecard is in a warning state - unless x also puts + // it in a danger state. (Danger trumps warning.) 
+ // + // As an example, consider a scorecard with the following four thresholds: + // { + // value: 90, + // category: 'DANGER', + // trigger: 'ABOVE', + // }, + // { + // value: 70, + // category: 'WARNING', + // trigger: 'ABOVE', + // }, + // { + // value: 10, + // category: 'DANGER', + // trigger: 'BELOW', + // }, + // { + // value: 20, + // category: 'WARNING', + // trigger: 'BELOW', + // } + // + // Then: values less than or equal to 10 would put the scorecard in a DANGER + // state, values greater than 10 but less than or equal to 20 a WARNING state, + // values strictly between 20 and 70 an OK state, values greater than or equal + // to 70 but less than 90 a WARNING state, and values greater than or equal to + // 90 a DANGER state. + repeated Threshold thresholds = 6; +} diff --git a/google/cloud/monitoring_dashboard_v1/proto/service.proto b/google/cloud/monitoring_dashboard_v1/proto/service.proto new file mode 100644 index 0000000..5bb41ec --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/proto/service.proto @@ -0,0 +1,23 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "ServiceMonitoringProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; diff --git a/google/cloud/monitoring_dashboard_v1/proto/text.proto b/google/cloud/monitoring_dashboard_v1/proto/text.proto new file mode 100644 index 0000000..acc1671 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/proto/text.proto @@ -0,0 +1,44 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "TextProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// A widget that displays textual content. +message Text { + // The format type of the text content. + enum Format { + // Format is unspecified. Defaults to MARKDOWN. + FORMAT_UNSPECIFIED = 0; + + // The text contains Markdown formatting. 
+ MARKDOWN = 1; + + // The text contains no special formatting. + RAW = 2; + } + + // The text content to be displayed. + string content = 1; + + // How the text content is formatted. + Format format = 2; +} diff --git a/google/cloud/monitoring_dashboard_v1/proto/widget.proto b/google/cloud/monitoring_dashboard_v1/proto/widget.proto new file mode 100644 index 0000000..12b5a69 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/proto/widget.proto @@ -0,0 +1,51 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/field_behavior.proto"; +import "google/monitoring/dashboard/v1/scorecard.proto"; +import "google/monitoring/dashboard/v1/text.proto"; +import "google/monitoring/dashboard/v1/xychart.proto"; +import "google/protobuf/empty.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "WidgetProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// Widget contains a single dashboard component and configuration of how to +// present the component in the dashboard. +message Widget { + // Optional. The title of the widget. + string title = 1 [(google.api.field_behavior) = OPTIONAL]; + + // Content defines the component used to populate the widget. + oneof content { + // A chart of time series data. + XyChart xy_chart = 2; + + // A scorecard summarizing time series data. + Scorecard scorecard = 3; + + // A raw string or markdown displaying textual content. + Text text = 4; + + // A blank space. + google.protobuf.Empty blank = 5; + } +} diff --git a/google/cloud/monitoring_dashboard_v1/proto/xychart.proto b/google/cloud/monitoring_dashboard_v1/proto/xychart.proto new file mode 100644 index 0000000..d241ae7 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/proto/xychart.proto @@ -0,0 +1,146 @@ +// Copyright 2020 Google LLC +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
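Combining the Scorecard, Text, and Widget messages above: a `Scorecard` pairs a `TimeSeriesQuery` with an optional gauge or spark chart plus thresholds, and is placed on a dashboard through a `Widget`. A minimal, self-contained sketch under the same assumption that the proto field names carry over to the generated types; the threshold values and metric filter are illustrative:

```py
from google.cloud import monitoring_dashboard_v1

query = monitoring_dashboard_v1.TimeSeriesQuery(
    time_series_filter=monitoring_dashboard_v1.TimeSeriesFilter(
        filter='metric.type="compute.googleapis.com/instance/cpu/utilization"',
    )
)

scorecard = monitoring_dashboard_v1.Scorecard(
    time_series_query=query,
    # Show recent history as a small line chart next to the latest value.
    spark_chart_view=monitoring_dashboard_v1.Scorecard.SparkChartView(
        spark_chart_type=monitoring_dashboard_v1.SparkChartType.SPARK_LINE,
    ),
    thresholds=[
        # Warning (YELLOW) above 0.7, danger (RED) above 0.9, in the metric's native scale.
        monitoring_dashboard_v1.Threshold(
            label="High CPU",
            value=0.9,
            color=monitoring_dashboard_v1.Threshold.Color.RED,
            direction=monitoring_dashboard_v1.Threshold.Direction.ABOVE,
        ),
        monitoring_dashboard_v1.Threshold(
            label="Elevated CPU",
            value=0.7,
            color=monitoring_dashboard_v1.Threshold.Color.YELLOW,
            direction=monitoring_dashboard_v1.Threshold.Direction.ABOVE,
        ),
    ],
)

widget = monitoring_dashboard_v1.Widget(title="CPU utilization", scorecard=scorecard)
```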
+ +syntax = "proto3"; + +package google.monitoring.dashboard.v1; + +import "google/api/field_behavior.proto"; +import "google/monitoring/dashboard/v1/metrics.proto"; +import "google/protobuf/duration.proto"; + +option go_package = "google.golang.org/genproto/googleapis/monitoring/dashboard/v1;dashboard"; +option java_multiple_files = true; +option java_outer_classname = "XyChartProto"; +option java_package = "com.google.monitoring.dashboard.v1"; +option ruby_package = "Google::Cloud::Monitoring::Dashboard::V1"; + +// A chart that displays data on a 2D (X and Y axes) plane. +message XyChart { + // Groups a time series query definition with charting options. + message DataSet { + // The types of plotting strategies for data sets. + enum PlotType { + // Plot type is unspecified. The view will default to `LINE`. + PLOT_TYPE_UNSPECIFIED = 0; + + // The data is plotted as a set of lines (one line per series). + LINE = 1; + + // The data is plotted as a set of filled areas (one area per series), + // with the areas stacked vertically (the base of each area is the top of + // its predecessor, and the base of the first area is the X axis). Since + // the areas do not overlap, each is filled with a different opaque color. + STACKED_AREA = 2; + + // The data is plotted as a set of rectangular boxes (one box per series), + // with the boxes stacked vertically (the base of each box is the top of + // its predecessor, and the base of the first box is the X axis). Since + // the boxes do not overlap, each is filled with a different opaque color. + STACKED_BAR = 3; + + // The data is plotted as a heatmap. The series being plotted must have a + // `DISTRIBUTION` value type. The value of each bucket in the distribution + // is displayed as a color. This type is not currently available in the + // Stackdriver Monitoring application. + HEATMAP = 4; + } + + // Required. Fields for querying time series data from the + // Stackdriver metrics API. + TimeSeriesQuery time_series_query = 1 [(google.api.field_behavior) = REQUIRED]; + + // How this data should be plotted on the chart. + PlotType plot_type = 2; + + // A template string for naming `TimeSeries` in the resulting data set. + // This should be a string with interpolations of the form `${label_name}`, + // which will resolve to the label's value. + string legend_template = 3; + + // Optional. The lower bound on data point frequency for this data set, implemented by + // specifying the minimum alignment period to use in a time series query + // For example, if the data is published once every 10 minutes, the + // `min_alignment_period` should be at least 10 minutes. It would not + // make sense to fetch and align data at one minute intervals. + google.protobuf.Duration min_alignment_period = 4 [(google.api.field_behavior) = OPTIONAL]; + } + + // A chart axis. + message Axis { + // Types of scales used in axes. + enum Scale { + // Scale is unspecified. The view will default to `LINEAR`. + SCALE_UNSPECIFIED = 0; + + // Linear scale. + LINEAR = 1; + + // Logarithmic scale (base 10). + LOG10 = 2; + } + + // The label of the axis. + string label = 1; + + // The axis scale. By default, a linear scale is used. + Scale scale = 2; + } + + // Required. The data displayed in this chart. + repeated DataSet data_sets = 1 [(google.api.field_behavior) = REQUIRED]; + + // The duration used to display a comparison chart. A comparison chart + // simultaneously shows values from two similar-length time periods + // (e.g., week-over-week metrics). 
+ // The duration must be positive, and it can only be applied to charts with + // data sets of LINE plot type. + google.protobuf.Duration timeshift_duration = 4; + + // Threshold lines drawn horizontally across the chart. + repeated Threshold thresholds = 5; + + // The properties applied to the X axis. + Axis x_axis = 6; + + // The properties applied to the Y axis. + Axis y_axis = 7; + + // Display options for the chart. + ChartOptions chart_options = 8; +} + +// Options to control visual rendering of a chart. +message ChartOptions { + // Chart mode options. + enum Mode { + // Mode is unspecified. The view will default to `COLOR`. + MODE_UNSPECIFIED = 0; + + // The chart distinguishes data series using different color. Line + // colors may get reused when there are many lines in the chart. + COLOR = 1; + + // The chart uses the Stackdriver x-ray mode, in which each + // data set is plotted using the same semi-transparent color. + X_RAY = 2; + + // The chart displays statistics such as average, median, 95th percentile, + // and more. + STATS = 3; + } + + // The chart mode. + Mode mode = 1; +} diff --git a/google/cloud/monitoring_dashboard_v1/py.typed b/google/cloud/monitoring_dashboard_v1/py.typed new file mode 100644 index 0000000..a52708e --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-monitoring-dashboard package uses inline types. diff --git a/google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2_grpc.py b/google/cloud/monitoring_dashboard_v1/services/__init__.py similarity index 87% rename from google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2_grpc.py rename to google/cloud/monitoring_dashboard_v1/services/__init__.py index b662812..42ffdf2 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/dashboard_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard_v1/services/__init__.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -11,7 +13,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! - -import grpc +# diff --git a/google/cloud/monitoring_dashboard_v1/services/dashboards_service/__init__.py b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/__init__.py new file mode 100644 index 0000000..ad6c65c --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
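To make the `XyChart`, `Axis`, and `ChartOptions` messages above concrete, here is a minimal sketch that assembles a single-line chart with the proto-plus types. It assumes the `TimeSeriesQuery` and `TimeSeriesFilter` types from `metrics.proto`, which this diff imports but does not show, and the metric filter string is only a placeholder.

```py
from google.cloud import monitoring_dashboard_v1 as dash

# One LINE data set plotted against a linear Y axis.
data_set = dash.types.XyChart.DataSet(
    time_series_query=dash.types.TimeSeriesQuery(
        time_series_filter=dash.types.TimeSeriesFilter(
            filter='metric.type="compute.googleapis.com/instance/cpu/utilization"',
        ),
    ),
    plot_type=dash.types.XyChart.DataSet.PlotType.LINE,
)

chart = dash.types.XyChart(
    data_sets=[data_set],
    y_axis=dash.types.XyChart.Axis(
        label="CPU utilization",
        scale=dash.types.XyChart.Axis.Scale.LINEAR,
    ),
    chart_options=dash.types.ChartOptions(
        mode=dash.types.ChartOptions.Mode.COLOR,
    ),
)
```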
+# + +from .client import DashboardsServiceClient +from .async_client import DashboardsServiceAsyncClient + +__all__ = ( + "DashboardsServiceClient", + "DashboardsServiceAsyncClient", +) diff --git a/google/cloud/monitoring_dashboard_v1/services/dashboards_service/async_client.py b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/async_client.py new file mode 100644 index 0000000..0d6f9fe --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/async_client.py @@ -0,0 +1,391 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_dashboard_v1.services.dashboards_service import pagers +from google.cloud.monitoring_dashboard_v1.types import dashboard +from google.cloud.monitoring_dashboard_v1.types import dashboards_service +from google.cloud.monitoring_dashboard_v1.types import layouts + +from .transports.base import DashboardsServiceTransport +from .transports.grpc_asyncio import DashboardsServiceGrpcAsyncIOTransport +from .client import DashboardsServiceClient + + +class DashboardsServiceAsyncClient: + """Manages Stackdriver dashboards. A dashboard is an arrangement + of data display widgets in a specific layout. + """ + + _client: DashboardsServiceClient + + DEFAULT_ENDPOINT = DashboardsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DashboardsServiceClient.DEFAULT_MTLS_ENDPOINT + + dashboard_path = staticmethod(DashboardsServiceClient.dashboard_path) + + from_service_account_file = DashboardsServiceClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(DashboardsServiceClient).get_transport_class, type(DashboardsServiceClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, DashboardsServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + ) -> None: + """Instantiate the dashboards service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DashboardsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = DashboardsServiceClient( + credentials=credentials, transport=transport, client_options=client_options, + ) + + async def create_dashboard( + self, + request: dashboards_service.CreateDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dashboard.Dashboard: + r"""Creates a new custom dashboard. + + This method requires the ``monitoring.dashboards.create`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.CreateDashboardRequest`): + The request object. The `CreateDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dashboard.Dashboard: + A Google Stackdriver dashboard. + Dashboards define the content and layout + of pages in the Stackdriver web + application. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.CreateDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_dashboard, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_dashboards( + self, + request: dashboards_service.ListDashboardsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDashboardsAsyncPager: + r"""Lists the existing dashboards. + + This method requires the ``monitoring.dashboards.list`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.ListDashboardsRequest`): + The request object. The `ListDashboards` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDashboardsAsyncPager: + The ``ListDashboards`` request. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.ListDashboardsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_dashboards, + default_timeout=None, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDashboardsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_dashboard( + self, + request: dashboards_service.GetDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dashboard.Dashboard: + r"""Fetches a specific dashboard. + + This method requires the ``monitoring.dashboards.get`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.GetDashboardRequest`): + The request object. The `GetDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dashboard.Dashboard: + A Google Stackdriver dashboard. + Dashboards define the content and layout + of pages in the Stackdriver web + application. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.GetDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_dashboard, + default_timeout=None, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_dashboard( + self, + request: dashboards_service.DeleteDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing custom dashboard. + + This method requires the ``monitoring.dashboards.delete`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.DeleteDashboardRequest`): + The request object. The `DeleteDashboard` request. 
+ + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.DeleteDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_dashboard, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def update_dashboard( + self, + request: dashboards_service.UpdateDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dashboard.Dashboard: + r"""Replaces an existing custom dashboard with a new definition. + + This method requires the ``monitoring.dashboards.update`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.UpdateDashboardRequest`): + The request object. The `UpdateDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dashboard.Dashboard: + A Google Stackdriver dashboard. + Dashboards define the content and layout + of pages in the Stackdriver web + application. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.UpdateDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_dashboard, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dashboard.name", request.dashboard.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-monitoring-dashboard", + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DashboardsServiceAsyncClient",) diff --git a/google/cloud/monitoring_dashboard_v1/services/dashboards_service/client.py b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/client.py new file mode 100644 index 0000000..e0ea14e --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/client.py @@ -0,0 +1,510 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
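A minimal usage sketch for the new `DashboardsServiceAsyncClient`, using the import path from the `services/dashboards_service/__init__.py` shown above. It assumes application default credentials are available; the parent project is a placeholder.

```py
import asyncio

from google.cloud.monitoring_dashboard_v1.services.dashboards_service import (
    DashboardsServiceAsyncClient,
)


async def main():
    client = DashboardsServiceAsyncClient()
    # "projects/my-project" is a placeholder parent.
    pager = await client.list_dashboards(request={"parent": "projects/my-project"})
    async for dashboard in pager:
        print(dashboard.name)


asyncio.run(main())
```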
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +import os +import re +from typing import Callable, Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.monitoring_dashboard_v1.services.dashboards_service import pagers +from google.cloud.monitoring_dashboard_v1.types import dashboard +from google.cloud.monitoring_dashboard_v1.types import dashboards_service +from google.cloud.monitoring_dashboard_v1.types import layouts + +from .transports.base import DashboardsServiceTransport +from .transports.grpc import DashboardsServiceGrpcTransport +from .transports.grpc_asyncio import DashboardsServiceGrpcAsyncIOTransport + + +class DashboardsServiceClientMeta(type): + """Metaclass for the DashboardsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DashboardsServiceTransport]] + _transport_registry["grpc"] = DashboardsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DashboardsServiceGrpcAsyncIOTransport + + def get_transport_class( + cls, label: str = None, + ) -> Type[DashboardsServiceTransport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DashboardsServiceClient(metaclass=DashboardsServiceClientMeta): + """Manages Stackdriver dashboards. A dashboard is an arrangement + of data display widgets in a specific layout. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "monitoring.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + {@api.name}: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @staticmethod + def dashboard_path(project: str, dashboard: str,) -> str: + """Return a fully-qualified dashboard string.""" + return "projects/{project}/dashboards/{dashboard}".format( + project=project, dashboard=dashboard, + ) + + @staticmethod + def parse_dashboard_path(path: str) -> Dict[str, str]: + """Parse a dashboard path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/dashboards/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, DashboardsServiceTransport] = None, + client_options: ClientOptions = None, + ) -> None: + """Instantiate the dashboards service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DashboardsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
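The constructor docstring above describes how `client_options.api_endpoint` interacts with the `GOOGLE_API_USE_MTLS` environment variable. A minimal sketch of overriding the endpoint explicitly, reusing the `DEFAULT_MTLS_ENDPOINT` derived by `_get_default_mtls_endpoint`; application default credentials are assumed, and actual calls over the mTLS endpoint additionally require client certificates.

```py
from google.api_core.client_options import ClientOptions
from google.cloud import monitoring_dashboard_v1

# An explicit api_endpoint takes precedence over GOOGLE_API_USE_MTLS.
options = ClientOptions(
    api_endpoint=monitoring_dashboard_v1.DashboardsServiceClient.DEFAULT_MTLS_ENDPOINT,
)
client = monitoring_dashboard_v1.DashboardsServiceClient(client_options=options)
```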
+ """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + if client_options.api_endpoint is None: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + if use_mtls_env == "never": + client_options.api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + has_client_cert_source = ( + client_options.client_cert_source is not None + or mtls.has_default_client_cert_source() + ) + client_options.api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT + if has_client_cert_source + else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DashboardsServiceTransport): + # transport is a DashboardsServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=client_options.api_endpoint, + scopes=client_options.scopes, + api_mtls_endpoint=client_options.api_endpoint, + client_cert_source=client_options.client_cert_source, + quota_project_id=client_options.quota_project_id, + ) + + def create_dashboard( + self, + request: dashboards_service.CreateDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dashboard.Dashboard: + r"""Creates a new custom dashboard. + + This method requires the ``monitoring.dashboards.create`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.CreateDashboardRequest`): + The request object. The `CreateDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dashboard.Dashboard: + A Google Stackdriver dashboard. + Dashboards define the content and layout + of pages in the Stackdriver web + application. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.CreateDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_dashboard] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def list_dashboards( + self, + request: dashboards_service.ListDashboardsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDashboardsPager: + r"""Lists the existing dashboards. + + This method requires the ``monitoring.dashboards.list`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.ListDashboardsRequest`): + The request object. The `ListDashboards` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDashboardsPager: + The ``ListDashboards`` request. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.ListDashboardsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_dashboards] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDashboardsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_dashboard( + self, + request: dashboards_service.GetDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dashboard.Dashboard: + r"""Fetches a specific dashboard. + + This method requires the ``monitoring.dashboards.get`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.GetDashboardRequest`): + The request object. The `GetDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dashboard.Dashboard: + A Google Stackdriver dashboard. + Dashboards define the content and layout + of pages in the Stackdriver web + application. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.GetDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dashboard] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + def delete_dashboard( + self, + request: dashboards_service.DeleteDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing custom dashboard. + + This method requires the ``monitoring.dashboards.delete`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.DeleteDashboardRequest`): + The request object. The `DeleteDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.DeleteDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_dashboard] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def update_dashboard( + self, + request: dashboards_service.UpdateDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dashboard.Dashboard: + r"""Replaces an existing custom dashboard with a new definition. + + This method requires the ``monitoring.dashboards.update`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.UpdateDashboardRequest`): + The request object. The `UpdateDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dashboard.Dashboard: + A Google Stackdriver dashboard. + Dashboards define the content and layout + of pages in the Stackdriver web + application. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.UpdateDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_dashboard] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dashboard.name", request.dashboard.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. 
+ return response + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-monitoring-dashboard", + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DashboardsServiceClient",) diff --git a/google/cloud/monitoring_dashboard_v1/services/dashboards_service/pagers.py b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/pagers.py new file mode 100644 index 0000000..95dae9a --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/pagers.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.cloud.monitoring_dashboard_v1.types import dashboard +from google.cloud.monitoring_dashboard_v1.types import dashboards_service + + +class ListDashboardsPager: + """A pager for iterating through ``list_dashboards`` requests. + + This class thinly wraps an initial + :class:`~.dashboards_service.ListDashboardsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``dashboards`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDashboards`` requests and continue to iterate + through the ``dashboards`` field on the + corresponding responses. + + All the usual :class:`~.dashboards_service.ListDashboardsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dashboards_service.ListDashboardsResponse], + request: dashboards_service.ListDashboardsRequest, + response: dashboards_service.ListDashboardsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.dashboards_service.ListDashboardsRequest`): + The initial request object. + response (:class:`~.dashboards_service.ListDashboardsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
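A short sketch of calling the synchronous client's `get_dashboard` and `delete_dashboard` methods with the `dashboard_path` helper defined above. The project and dashboard IDs are placeholders, and application default credentials are assumed.

```py
from google.cloud import monitoring_dashboard_v1

client = monitoring_dashboard_v1.DashboardsServiceClient()

# Placeholder project and dashboard IDs.
name = client.dashboard_path("my-project", "my-dashboard")

dashboard = client.get_dashboard(request={"name": name})
print(dashboard.display_name)

client.delete_dashboard(request={"name": name})
```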
+ """ + self._method = method + self._request = dashboards_service.ListDashboardsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[dashboards_service.ListDashboardsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dashboard.Dashboard]: + for page in self.pages: + yield from page.dashboards + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDashboardsAsyncPager: + """A pager for iterating through ``list_dashboards`` requests. + + This class thinly wraps an initial + :class:`~.dashboards_service.ListDashboardsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``dashboards`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDashboards`` requests and continue to iterate + through the ``dashboards`` field on the + corresponding responses. + + All the usual :class:`~.dashboards_service.ListDashboardsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[dashboards_service.ListDashboardsResponse]], + request: dashboards_service.ListDashboardsRequest, + response: dashboards_service.ListDashboardsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.dashboards_service.ListDashboardsRequest`): + The initial request object. + response (:class:`~.dashboards_service.ListDashboardsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = dashboards_service.ListDashboardsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[dashboards_service.ListDashboardsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dashboard.Dashboard]: + async def async_generator(): + async for page in self.pages: + for response in page.dashboards: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/monitoring_dashboard_v1/services/dashboards_service/transports/__init__.py b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/transports/__init__.py new file mode 100644 index 0000000..7057d44 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import DashboardsServiceTransport +from .grpc import DashboardsServiceGrpcTransport +from .grpc_asyncio import DashboardsServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DashboardsServiceTransport]] +_transport_registry["grpc"] = DashboardsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DashboardsServiceGrpcAsyncIOTransport + + +__all__ = ( + "DashboardsServiceTransport", + "DashboardsServiceGrpcTransport", + "DashboardsServiceGrpcAsyncIOTransport", +) diff --git a/google/cloud/monitoring_dashboard_v1/services/dashboards_service/transports/base.py b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/transports/base.py new file mode 100644 index 0000000..45a4a7a --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/transports/base.py @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
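The pagers above hide page tokens behind plain iteration. A minimal sketch of both iteration styles against the synchronous client, with a placeholder parent and application default credentials assumed:

```py
from google.cloud import monitoring_dashboard_v1

client = monitoring_dashboard_v1.DashboardsServiceClient()
parent = "projects/my-project"  # placeholder

# Item iteration: the pager requests further pages as needed.
for dashboard in client.list_dashboards(request={"parent": parent}):
    print(dashboard.display_name)

# Page iteration: inspect each raw ListDashboardsResponse.
pager = client.list_dashboards(request={"parent": parent})
for page in pager.pages:
    print(len(page.dashboards), page.next_page_token)
```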
+# + +import abc +import typing +import pkg_resources + +from google import auth +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.auth import credentials # type: ignore + +from google.cloud.monitoring_dashboard_v1.types import dashboard +from google.cloud.monitoring_dashboard_v1.types import dashboards_service +from google.protobuf import empty_pb2 as empty # type: ignore + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-monitoring-dashboard", + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +class DashboardsServiceTransport(abc.ABC): + """Abstract transport class for DashboardsService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/monitoring", + "https://www.googleapis.com/auth/monitoring.read", + "https://www.googleapis.com/auth/monitoring.write", + ) + + def __init__( + self, + *, + host: str = "monitoring.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages() + + def _prep_wrapped_messages(self): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.create_dashboard: gapic_v1.method.wrap_method( + self.create_dashboard, default_timeout=30.0, client_info=_client_info, + ), + self.list_dashboards: gapic_v1.method.wrap_method( + self.list_dashboards, default_timeout=None, client_info=_client_info, + ), + self.get_dashboard: gapic_v1.method.wrap_method( + self.get_dashboard, default_timeout=None, client_info=_client_info, + ), + self.delete_dashboard: gapic_v1.method.wrap_method( + self.delete_dashboard, default_timeout=30.0, client_info=_client_info, + ), + self.update_dashboard: gapic_v1.method.wrap_method( + self.update_dashboard, default_timeout=30.0, client_info=_client_info, + ), + } + + @property + def create_dashboard( + self, + ) -> typing.Callable[ + [dashboards_service.CreateDashboardRequest], + typing.Union[dashboard.Dashboard, typing.Awaitable[dashboard.Dashboard]], + ]: + raise NotImplementedError() + + @property + def list_dashboards( + self, + ) -> typing.Callable[ + [dashboards_service.ListDashboardsRequest], + typing.Union[ + dashboards_service.ListDashboardsResponse, + typing.Awaitable[dashboards_service.ListDashboardsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_dashboard( + self, + ) -> typing.Callable[ + [dashboards_service.GetDashboardRequest], + typing.Union[dashboard.Dashboard, typing.Awaitable[dashboard.Dashboard]], + ]: + raise NotImplementedError() + + @property + def delete_dashboard( + self, + ) -> typing.Callable[ + [dashboards_service.DeleteDashboardRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def update_dashboard( + self, + ) -> typing.Callable[ + [dashboards_service.UpdateDashboardRequest], + typing.Union[dashboard.Dashboard, typing.Awaitable[dashboard.Dashboard]], + ]: + raise NotImplementedError() + + +__all__ = ("DashboardsServiceTransport",) diff --git a/google/cloud/monitoring_dashboard_v1/services/dashboards_service/transports/grpc.py b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/transports/grpc.py new file mode 100644 index 0000000..3c79d99 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/transports/grpc.py @@ -0,0 +1,363 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + + +import grpc # type: ignore + +from google.cloud.monitoring_dashboard_v1.types import dashboard +from google.cloud.monitoring_dashboard_v1.types import dashboards_service +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import DashboardsServiceTransport + + +class DashboardsServiceGrpcTransport(DashboardsServiceTransport): + """gRPC backend transport for DashboardsService. 
+ + Manages Stackdriver dashboards. A dashboard is an arrangement + of data display widgets in a specific layout. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "monitoring.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. 
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} # type: Dict[str, Callable] + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + @classmethod + def create_channel( + cls, + host: str = "monitoring.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + address (Optionsl[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def create_dashboard( + self, + ) -> Callable[[dashboards_service.CreateDashboardRequest], dashboard.Dashboard]: + r"""Return a callable for the create dashboard method over gRPC. + + Creates a new custom dashboard. + + This method requires the ``monitoring.dashboards.create`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.CreateDashboardRequest], + ~.Dashboard]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
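A sketch of wiring an explicitly created channel through `DashboardsServiceGrpcTransport` into the client, which the constructor above allows. Credentials and scopes fall back to application defaults; this is illustrative only.

```py
from google.cloud import monitoring_dashboard_v1
from google.cloud.monitoring_dashboard_v1.services.dashboards_service.transports import (
    DashboardsServiceGrpcTransport,
)

# create_channel falls back to application default credentials and the
# transport's own AUTH_SCOPES when none are given.
channel = DashboardsServiceGrpcTransport.create_channel("monitoring.googleapis.com:443")
transport = DashboardsServiceGrpcTransport(channel=channel)

# Per the client constructor, credentials must not also be passed here when a
# transport instance is supplied; they already ride on the channel.
client = monitoring_dashboard_v1.DashboardsServiceClient(transport=transport)
```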
+ if "create_dashboard" not in self._stubs: + self._stubs["create_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/CreateDashboard", + request_serializer=dashboards_service.CreateDashboardRequest.serialize, + response_deserializer=dashboard.Dashboard.deserialize, + ) + return self._stubs["create_dashboard"] + + @property + def list_dashboards( + self, + ) -> Callable[ + [dashboards_service.ListDashboardsRequest], + dashboards_service.ListDashboardsResponse, + ]: + r"""Return a callable for the list dashboards method over gRPC. + + Lists the existing dashboards. + + This method requires the ``monitoring.dashboards.list`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.ListDashboardsRequest], + ~.ListDashboardsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_dashboards" not in self._stubs: + self._stubs["list_dashboards"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/ListDashboards", + request_serializer=dashboards_service.ListDashboardsRequest.serialize, + response_deserializer=dashboards_service.ListDashboardsResponse.deserialize, + ) + return self._stubs["list_dashboards"] + + @property + def get_dashboard( + self, + ) -> Callable[[dashboards_service.GetDashboardRequest], dashboard.Dashboard]: + r"""Return a callable for the get dashboard method over gRPC. + + Fetches a specific dashboard. + + This method requires the ``monitoring.dashboards.get`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.GetDashboardRequest], + ~.Dashboard]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dashboard" not in self._stubs: + self._stubs["get_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/GetDashboard", + request_serializer=dashboards_service.GetDashboardRequest.serialize, + response_deserializer=dashboard.Dashboard.deserialize, + ) + return self._stubs["get_dashboard"] + + @property + def delete_dashboard( + self, + ) -> Callable[[dashboards_service.DeleteDashboardRequest], empty.Empty]: + r"""Return a callable for the delete dashboard method over gRPC. + + Deletes an existing custom dashboard. + + This method requires the ``monitoring.dashboards.delete`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.DeleteDashboardRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_dashboard" not in self._stubs: + self._stubs["delete_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/DeleteDashboard", + request_serializer=dashboards_service.DeleteDashboardRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_dashboard"] + + @property + def update_dashboard( + self, + ) -> Callable[[dashboards_service.UpdateDashboardRequest], dashboard.Dashboard]: + r"""Return a callable for the update dashboard method over gRPC. + + Replaces an existing custom dashboard with a new definition. + + This method requires the ``monitoring.dashboards.update`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.UpdateDashboardRequest], + ~.Dashboard]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_dashboard" not in self._stubs: + self._stubs["update_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/UpdateDashboard", + request_serializer=dashboards_service.UpdateDashboardRequest.serialize, + response_deserializer=dashboard.Dashboard.deserialize, + ) + return self._stubs["update_dashboard"] + + +__all__ = ("DashboardsServiceGrpcTransport",) diff --git a/google/cloud/monitoring_dashboard_v1/services/dashboards_service/transports/grpc_asyncio.py b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/transports/grpc_asyncio.py new file mode 100644 index 0000000..56d63f6 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/services/dashboards_service/transports/grpc_asyncio.py @@ -0,0 +1,362 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.monitoring_dashboard_v1.types import dashboard +from google.cloud.monitoring_dashboard_v1.types import dashboards_service +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import DashboardsServiceTransport +from .grpc import DashboardsServiceGrpcTransport + + +class DashboardsServiceGrpcAsyncIOTransport(DashboardsServiceTransport): + """gRPC AsyncIO backend transport for DashboardsService. + + Manages Stackdriver dashboards. A dashboard is an arrangement + of data display widgets in a specific layout. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "monitoring.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "monitoring.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def create_dashboard( + self, + ) -> Callable[ + [dashboards_service.CreateDashboardRequest], Awaitable[dashboard.Dashboard] + ]: + r"""Return a callable for the create dashboard method over gRPC. + + Creates a new custom dashboard. + + This method requires the ``monitoring.dashboards.create`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.CreateDashboardRequest], + Awaitable[~.Dashboard]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_dashboard" not in self._stubs: + self._stubs["create_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/CreateDashboard", + request_serializer=dashboards_service.CreateDashboardRequest.serialize, + response_deserializer=dashboard.Dashboard.deserialize, + ) + return self._stubs["create_dashboard"] + + @property + def list_dashboards( + self, + ) -> Callable[ + [dashboards_service.ListDashboardsRequest], + Awaitable[dashboards_service.ListDashboardsResponse], + ]: + r"""Return a callable for the list dashboards method over gRPC. + + Lists the existing dashboards. + + This method requires the ``monitoring.dashboards.list`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. 
+ + Returns: + Callable[[~.ListDashboardsRequest], + Awaitable[~.ListDashboardsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_dashboards" not in self._stubs: + self._stubs["list_dashboards"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/ListDashboards", + request_serializer=dashboards_service.ListDashboardsRequest.serialize, + response_deserializer=dashboards_service.ListDashboardsResponse.deserialize, + ) + return self._stubs["list_dashboards"] + + @property + def get_dashboard( + self, + ) -> Callable[ + [dashboards_service.GetDashboardRequest], Awaitable[dashboard.Dashboard] + ]: + r"""Return a callable for the get dashboard method over gRPC. + + Fetches a specific dashboard. + + This method requires the ``monitoring.dashboards.get`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.GetDashboardRequest], + Awaitable[~.Dashboard]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dashboard" not in self._stubs: + self._stubs["get_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/GetDashboard", + request_serializer=dashboards_service.GetDashboardRequest.serialize, + response_deserializer=dashboard.Dashboard.deserialize, + ) + return self._stubs["get_dashboard"] + + @property + def delete_dashboard( + self, + ) -> Callable[[dashboards_service.DeleteDashboardRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete dashboard method over gRPC. + + Deletes an existing custom dashboard. + + This method requires the ``monitoring.dashboards.delete`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.DeleteDashboardRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_dashboard" not in self._stubs: + self._stubs["delete_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/DeleteDashboard", + request_serializer=dashboards_service.DeleteDashboardRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_dashboard"] + + @property + def update_dashboard( + self, + ) -> Callable[ + [dashboards_service.UpdateDashboardRequest], Awaitable[dashboard.Dashboard] + ]: + r"""Return a callable for the update dashboard method over gRPC. + + Replaces an existing custom dashboard with a new definition. + + This method requires the ``monitoring.dashboards.update`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.UpdateDashboardRequest], + Awaitable[~.Dashboard]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_dashboard" not in self._stubs: + self._stubs["update_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/UpdateDashboard", + request_serializer=dashboards_service.UpdateDashboardRequest.serialize, + response_deserializer=dashboard.Dashboard.deserialize, + ) + return self._stubs["update_dashboard"] + + +__all__ = ("DashboardsServiceGrpcAsyncIOTransport",) diff --git a/google/cloud/monitoring_dashboard_v1/types/__init__.py b/google/cloud/monitoring_dashboard_v1/types/__init__.py new file mode 100644 index 0000000..c963fe0 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/types/__init__.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .common import ( + Aggregation, + PickTimeSeriesFilter, + StatisticalTimeSeriesFilter, +) +from .metrics import ( + TimeSeriesQuery, + TimeSeriesFilter, + TimeSeriesFilterRatio, + Threshold, +) +from .scorecard import Scorecard +from .text import Text +from .xychart import ( + XyChart, + ChartOptions, +) +from .widget import Widget +from .layouts import ( + GridLayout, + RowLayout, + ColumnLayout, +) +from .dashboard import Dashboard +from .dashboards_service import ( + CreateDashboardRequest, + ListDashboardsRequest, + ListDashboardsResponse, + GetDashboardRequest, + DeleteDashboardRequest, + UpdateDashboardRequest, +) + + +__all__ = ( + "Aggregation", + "PickTimeSeriesFilter", + "StatisticalTimeSeriesFilter", + "TimeSeriesQuery", + "TimeSeriesFilter", + "TimeSeriesFilterRatio", + "Threshold", + "Scorecard", + "Text", + "XyChart", + "ChartOptions", + "Widget", + "GridLayout", + "RowLayout", + "ColumnLayout", + "Dashboard", + "CreateDashboardRequest", + "ListDashboardsRequest", + "ListDashboardsResponse", + "GetDashboardRequest", + "DeleteDashboardRequest", + "UpdateDashboardRequest", +) diff --git a/google/cloud/monitoring_dashboard_v1/types/common.py b/google/cloud/monitoring_dashboard_v1/types/common.py new file mode 100644 index 0000000..b90f9c6 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/types/common.py @@ -0,0 +1,270 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
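These transport classes are normally constructed for you by the generated client, but they can also be built directly, for example to reuse an explicitly created gRPC channel. Below is a minimal sketch, assuming application default credentials are configured and that the generated `DashboardsServiceClient` accepts a ready-made transport instance (typical for GAPIC-generated clients, though the client module itself is not part of this excerpt):

```py
from google.cloud import monitoring_dashboard_v1
from google.cloud.monitoring_dashboard_v1.services.dashboards_service.transports.grpc import (
    DashboardsServiceGrpcTransport,
)

# Create a channel with the default auth scopes, then wrap it in the transport.
# The transport's stub properties (create_dashboard, list_dashboards, ...) are
# built lazily on top of this channel.
channel = DashboardsServiceGrpcTransport.create_channel("monitoring.googleapis.com")
transport = DashboardsServiceGrpcTransport(channel=channel)

client = monitoring_dashboard_v1.DashboardsServiceClient(transport=transport)
```

The asyncio variant follows the same shape with `DashboardsServiceGrpcAsyncIOTransport` and an `aio.Channel`.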
+# + +import proto # type: ignore + + +from google.protobuf import duration_pb2 as duration # type: ignore + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", + manifest={"Aggregation", "PickTimeSeriesFilter", "StatisticalTimeSeriesFilter",}, +) + + +class Aggregation(proto.Message): + r"""Describes how to combine multiple time series to provide a different + view of the data. Aggregation of time series is done in two steps. + First, each time series in the set is *aligned* to the same time + interval boundaries, then the set of time series is optionally + *reduced* in number. + + Alignment consists of applying the ``per_series_aligner`` operation + to each time series after its data has been divided into regular + ``alignment_period`` time intervals. This process takes *all* of the + data points in an alignment period, applies a mathematical + transformation such as averaging, minimum, maximum, delta, etc., and + converts them into a single data point per period. + + Reduction is when the aligned and transformed time series can + optionally be combined, reducing the number of time series through + similar mathematical transformations. Reduction involves applying a + ``cross_series_reducer`` to all the time series, optionally sorting + the time series into subsets with ``group_by_fields``, and applying + the reducer to each subset. + + The raw time series data can contain a huge amount of information + from multiple sources. Alignment and reduction transforms this mass + of data into a more manageable and representative collection of + data, for example "the 95% latency across the average of all tasks + in a cluster". This representative data can be more easily graphed + and comprehended, and the individual time series data is still + available for later drilldown. For more details, see `Filtering and + aggregation `__. + + Attributes: + alignment_period (~.duration.Duration): + The ``alignment_period`` specifies a time interval, in + seconds, that is used to divide the data in all the [time + series][google.monitoring.v3.TimeSeries] into consistent + blocks of time. This will be done before the per-series + aligner can be applied to the data. + + The value must be at least 60 seconds. If a per-series + aligner other than ``ALIGN_NONE`` is specified, this field + is required or an error is returned. If no per-series + aligner is specified, or the aligner ``ALIGN_NONE`` is + specified, then this field is ignored. + per_series_aligner (~.common.Aggregation.Aligner): + An ``Aligner`` describes how to bring the data points in a + single time series into temporal alignment. Except for + ``ALIGN_NONE``, all alignments cause all the data points in + an ``alignment_period`` to be mathematically grouped + together, resulting in a single data point for each + ``alignment_period`` with end timestamp at the end of the + period. + + Not all alignment operations may be applied to all time + series. The valid choices depend on the ``metric_kind`` and + ``value_type`` of the original time series. Alignment can + change the ``metric_kind`` or the ``value_type`` of the time + series. + + Time series data must be aligned in order to perform + cross-time series reduction. If ``cross_series_reducer`` is + specified, then ``per_series_aligner`` must be specified and + not equal to ``ALIGN_NONE`` and ``alignment_period`` must be + specified; otherwise, an error is returned. 
+ cross_series_reducer (~.common.Aggregation.Reducer): + The reduction operation to be used to combine time series + into a single time series, where the value of each data + point in the resulting series is a function of all the + already aligned values in the input time series. + + Not all reducer operations can be applied to all time + series. The valid choices depend on the ``metric_kind`` and + the ``value_type`` of the original time series. Reduction + can yield a time series with a different ``metric_kind`` or + ``value_type`` than the input time series. + + Time series data must first be aligned (see + ``per_series_aligner``) in order to perform cross-time + series reduction. If ``cross_series_reducer`` is specified, + then ``per_series_aligner`` must be specified, and must not + be ``ALIGN_NONE``. An ``alignment_period`` must also be + specified; otherwise, an error is returned. + group_by_fields (Sequence[str]): + The set of fields to preserve when ``cross_series_reducer`` + is specified. The ``group_by_fields`` determine how the time + series are partitioned into subsets prior to applying the + aggregation operation. Each subset contains time series that + have the same value for each of the grouping fields. Each + individual time series is a member of exactly one subset. + The ``cross_series_reducer`` is applied to each subset of + time series. It is not possible to reduce across different + resource types, so this field implicitly contains + ``resource.type``. Fields not specified in + ``group_by_fields`` are aggregated away. If + ``group_by_fields`` is not specified and all the time series + have the same resource type, then the time series are + aggregated into a single output time series. If + ``cross_series_reducer`` is not defined, this field is + ignored. + """ + + class Aligner(proto.Enum): + r"""The ``Aligner`` specifies the operation that will be applied to the + data points in each alignment period in a time series. Except for + ``ALIGN_NONE``, which specifies that no operation be applied, each + alignment operation replaces the set of data values in each + alignment period with a single value: the result of applying the + operation to the data values. An aligned time series has a single + data value at the end of each ``alignment_period``. + + An alignment operation can change the data type of the values, too. + For example, if you apply a counting operation to boolean values, + the data ``value_type`` in the original time series is ``BOOLEAN``, + but the ``value_type`` in the aligned result is ``INT64``. + """ + ALIGN_NONE = 0 + ALIGN_DELTA = 1 + ALIGN_RATE = 2 + ALIGN_INTERPOLATE = 3 + ALIGN_NEXT_OLDER = 4 + ALIGN_MIN = 10 + ALIGN_MAX = 11 + ALIGN_MEAN = 12 + ALIGN_COUNT = 13 + ALIGN_SUM = 14 + ALIGN_STDDEV = 15 + ALIGN_COUNT_TRUE = 16 + ALIGN_COUNT_FALSE = 24 + ALIGN_FRACTION_TRUE = 17 + ALIGN_PERCENTILE_99 = 18 + ALIGN_PERCENTILE_95 = 19 + ALIGN_PERCENTILE_50 = 20 + ALIGN_PERCENTILE_05 = 21 + ALIGN_PERCENT_CHANGE = 23 + + class Reducer(proto.Enum): + r"""A Reducer operation describes how to aggregate data points + from multiple time series into a single time series, where the + value of each data point in the resulting series is a function + of all the already aligned values in the input time series. 
+ """ + REDUCE_NONE = 0 + REDUCE_MEAN = 1 + REDUCE_MIN = 2 + REDUCE_MAX = 3 + REDUCE_SUM = 4 + REDUCE_STDDEV = 5 + REDUCE_COUNT = 6 + REDUCE_COUNT_TRUE = 7 + REDUCE_COUNT_FALSE = 15 + REDUCE_FRACTION_TRUE = 8 + REDUCE_PERCENTILE_99 = 9 + REDUCE_PERCENTILE_95 = 10 + REDUCE_PERCENTILE_50 = 11 + REDUCE_PERCENTILE_05 = 12 + + alignment_period = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) + + per_series_aligner = proto.Field(proto.ENUM, number=2, enum=Aligner,) + + cross_series_reducer = proto.Field(proto.ENUM, number=4, enum=Reducer,) + + group_by_fields = proto.RepeatedField(proto.STRING, number=5) + + +class PickTimeSeriesFilter(proto.Message): + r"""Describes a ranking-based time series filter. Each input time series + is ranked with an aligner. The filter will allow up to + ``num_time_series`` time series to pass through it, selecting them + based on the relative ranking. + + For example, if ``ranking_method`` is + ``METHOD_MEAN``,\ ``direction`` is ``BOTTOM``, and + ``num_time_series`` is 3, then the 3 times series with the lowest + mean values will pass through the filter. + + Attributes: + ranking_method (~.common.PickTimeSeriesFilter.Method): + ``ranking_method`` is applied to each time series + independently to produce the value which will be used to + compare the time series to other time series. + num_time_series (int): + How many time series to allow to pass through + the filter. + direction (~.common.PickTimeSeriesFilter.Direction): + How to use the ranking to select time series + that pass through the filter. + """ + + class Method(proto.Enum): + r"""The value reducers that can be applied to a + ``PickTimeSeriesFilter``. + """ + METHOD_UNSPECIFIED = 0 + METHOD_MEAN = 1 + METHOD_MAX = 2 + METHOD_MIN = 3 + METHOD_SUM = 4 + METHOD_LATEST = 5 + + class Direction(proto.Enum): + r"""Describes the ranking directions.""" + DIRECTION_UNSPECIFIED = 0 + TOP = 1 + BOTTOM = 2 + + ranking_method = proto.Field(proto.ENUM, number=1, enum=Method,) + + num_time_series = proto.Field(proto.INT32, number=2) + + direction = proto.Field(proto.ENUM, number=3, enum=Direction,) + + +class StatisticalTimeSeriesFilter(proto.Message): + r"""A filter that ranks streams based on their statistical + relation to other streams in a request. + Note: This field is deprecated and completely ignored by the + API. + + Attributes: + ranking_method (~.common.StatisticalTimeSeriesFilter.Method): + ``rankingMethod`` is applied to a set of time series, and + then the produced value for each individual time series is + used to compare a given time series to others. These are + methods that cannot be applied stream-by-stream, but rather + require the full context of a request to evaluate time + series. + num_time_series (int): + How many time series to output. 
+ """ + + class Method(proto.Enum): + r"""The filter methods that can be applied to a stream.""" + METHOD_UNSPECIFIED = 0 + METHOD_CLUSTER_OUTLIER = 1 + + ranking_method = proto.Field(proto.ENUM, number=1, enum=Method,) + + num_time_series = proto.Field(proto.INT32, number=2) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/monitoring_dashboard_v1/types/dashboard.py b/google/cloud/monitoring_dashboard_v1/types/dashboard.py new file mode 100644 index 0000000..5077b48 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/types/dashboard.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.monitoring_dashboard_v1.types import layouts + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", manifest={"Dashboard",}, +) + + +class Dashboard(proto.Message): + r"""A Google Stackdriver dashboard. Dashboards define the content + and layout of pages in the Stackdriver web application. + + Attributes: + name (str): + Immutable. The resource name of the + dashboard. + display_name (str): + Required. The mutable, human-readable name. + etag (str): + ``etag`` is used for optimistic concurrency control as a way + to help prevent simultaneous updates of a policy from + overwriting each other. An ``etag`` is returned in the + response to ``GetDashboard``, and users are expected to put + that etag in the request to ``UpdateDashboard`` to ensure + that their change will be applied to the same version of the + Dashboard configuration. The field should not be passed + during dashboard creation. + grid_layout (~.layouts.GridLayout): + Content is arranged with a basic layout that + re-flows a simple list of informational elements + like widgets or tiles. + row_layout (~.layouts.RowLayout): + The content is divided into equally spaced + rows and the widgets are arranged horizontally. + column_layout (~.layouts.ColumnLayout): + The content is divided into equally spaced + columns and the widgets are arranged vertically. 
+ """ + + name = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + etag = proto.Field(proto.STRING, number=4) + + grid_layout = proto.Field( + proto.MESSAGE, number=5, oneof="layout", message=layouts.GridLayout, + ) + + row_layout = proto.Field( + proto.MESSAGE, number=8, oneof="layout", message=layouts.RowLayout, + ) + + column_layout = proto.Field( + proto.MESSAGE, number=9, oneof="layout", message=layouts.ColumnLayout, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/monitoring_dashboard_v1/types/dashboards_service.py b/google/cloud/monitoring_dashboard_v1/types/dashboards_service.py new file mode 100644 index 0000000..92cd956 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/types/dashboards_service.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.monitoring_dashboard_v1.types import dashboard as gmd_dashboard + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", + manifest={ + "CreateDashboardRequest", + "ListDashboardsRequest", + "ListDashboardsResponse", + "GetDashboardRequest", + "DeleteDashboardRequest", + "UpdateDashboardRequest", + }, +) + + +class CreateDashboardRequest(proto.Message): + r"""The ``CreateDashboard`` request. + + Attributes: + parent (str): + Required. The project on which to execute the request. The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + The ``[PROJECT_ID_OR_NUMBER]`` must match the dashboard + resource name. + dashboard (~.gmd_dashboard.Dashboard): + Required. The initial dashboard + specification. + """ + + parent = proto.Field(proto.STRING, number=1) + + dashboard = proto.Field(proto.MESSAGE, number=2, message=gmd_dashboard.Dashboard,) + + +class ListDashboardsRequest(proto.Message): + r"""The ``ListDashboards`` request. + + Attributes: + parent (str): + Required. The scope of the dashboards to list. The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + page_size (int): + A positive number that is the maximum number + of results to return. If unspecified, a default + of 1000 is used. + page_token (str): + If this field is not empty then it must contain the + ``nextPageToken`` value returned by a previous call to this + method. Using this field causes the method to return + additional results from the previous method call. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListDashboardsResponse(proto.Message): + r"""The ``ListDashboards`` request. + + Attributes: + dashboards (Sequence[~.gmd_dashboard.Dashboard]): + The list of requested dashboards. + next_page_token (str): + If there are more results than have been returned, then this + field is set to a non-empty value. To see the additional + results, use that value as ``page_token`` in the next call + to this method. 
+ """ + + @property + def raw_page(self): + return self + + dashboards = proto.RepeatedField( + proto.MESSAGE, number=1, message=gmd_dashboard.Dashboard, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class GetDashboardRequest(proto.Message): + r"""The ``GetDashboard`` request. + + Attributes: + name (str): + Required. The resource name of the Dashboard. The format is + one of: + + - ``dashboards/[DASHBOARD_ID]`` (for system dashboards) + - ``projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID]`` + (for custom dashboards). + """ + + name = proto.Field(proto.STRING, number=1) + + +class DeleteDashboardRequest(proto.Message): + r"""The ``DeleteDashboard`` request. + + Attributes: + name (str): + Required. The resource name of the Dashboard. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID] + """ + + name = proto.Field(proto.STRING, number=1) + + +class UpdateDashboardRequest(proto.Message): + r"""The ``UpdateDashboard`` request. + + Attributes: + dashboard (~.gmd_dashboard.Dashboard): + Required. The dashboard that will replace the + existing dashboard. + """ + + dashboard = proto.Field(proto.MESSAGE, number=1, message=gmd_dashboard.Dashboard,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2_grpc.py b/google/cloud/monitoring_dashboard_v1/types/drilldowns.py similarity index 78% rename from google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2_grpc.py rename to google/cloud/monitoring_dashboard_v1/types/drilldowns.py index b662812..7c1c138 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/drilldowns_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard_v1/types/drilldowns.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -11,7 +13,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# + + +__protobuf__ = proto.module(package="google.monitoring.dashboard.v1", manifest={},) -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/monitoring_dashboard_v1/types/layouts.py b/google/cloud/monitoring_dashboard_v1/types/layouts.py new file mode 100644 index 0000000..683f6f3 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/types/layouts.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
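The request and response messages above are plain proto-plus types, so they can be built with keyword arguments (or dicts) and handed to the client via the request-object calling convention. A short sketch, assuming a placeholder project ID and that `list_dashboards` returns the usual GAPIC pager that follows `next_page_token` transparently:

```py
from google.cloud import monitoring_dashboard_v1
from google.cloud.monitoring_dashboard_v1.types import dashboards_service

client = monitoring_dashboard_v1.DashboardsServiceClient()

# "example-project" is a placeholder; substitute a real project ID or number.
request = dashboards_service.ListDashboardsRequest(
    parent="projects/example-project",
    page_size=100,
)

for dash in client.list_dashboards(request=request):
    print(dash.name, dash.display_name)
```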
+# + +import proto # type: ignore + + +from google.cloud.monitoring_dashboard_v1.types import widget + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", + manifest={"GridLayout", "RowLayout", "ColumnLayout",}, +) + + +class GridLayout(proto.Message): + r"""A basic layout divides the available space into vertical + columns of equal width and arranges a list of widgets using a + row-first strategy. + + Attributes: + columns (int): + The number of columns into which the view's + width is divided. If omitted or set to zero, a + system default will be used while rendering. + widgets (Sequence[~.widget.Widget]): + The informational elements that are arranged + into the columns row-first. + """ + + columns = proto.Field(proto.INT64, number=1) + + widgets = proto.RepeatedField(proto.MESSAGE, number=2, message=widget.Widget,) + + +class RowLayout(proto.Message): + r"""A simplified layout that divides the available space into + rows and arranges a set of widgets horizontally in each row. + + Attributes: + rows (Sequence[~.layouts.RowLayout.Row]): + The rows of content to display. + """ + + class Row(proto.Message): + r"""Defines the layout properties and content for a row. + + Attributes: + weight (int): + The relative weight of this row. The row + weight is used to adjust the height of rows on + the screen (relative to peers). Greater the + weight, greater the height of the row on the + screen. If omitted, a value of 1 is used while + rendering. + widgets (Sequence[~.widget.Widget]): + The display widgets arranged horizontally in + this row. + """ + + weight = proto.Field(proto.INT64, number=1) + + widgets = proto.RepeatedField(proto.MESSAGE, number=2, message=widget.Widget,) + + rows = proto.RepeatedField(proto.MESSAGE, number=1, message=Row,) + + +class ColumnLayout(proto.Message): + r"""A simplified layout that divides the available space into + vertical columns and arranges a set of widgets vertically in + each column. + + Attributes: + columns (Sequence[~.layouts.ColumnLayout.Column]): + The columns of content to display. + """ + + class Column(proto.Message): + r"""Defines the layout properties and content for a column. + + Attributes: + weight (int): + The relative weight of this column. The + column weight is used to adjust the width of + columns on the screen (relative to peers). + Greater the weight, greater the width of the + column on the screen. If omitted, a value of 1 + is used while rendering. + widgets (Sequence[~.widget.Widget]): + The display widgets arranged vertically in + this column. + """ + + weight = proto.Field(proto.INT64, number=1) + + widgets = proto.RepeatedField(proto.MESSAGE, number=2, message=widget.Widget,) + + columns = proto.RepeatedField(proto.MESSAGE, number=1, message=Column,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/monitoring_dashboard_v1/types/metrics.py b/google/cloud/monitoring_dashboard_v1/types/metrics.py new file mode 100644 index 0000000..5eb7570 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/types/metrics.py @@ -0,0 +1,236 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
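The layout messages nest `Widget` objects: `GridLayout` flows its widgets into a fixed number of columns, while `RowLayout` and `ColumnLayout` use per-row or per-column weights for relative sizing. A minimal sketch of a two-row layout, using blank widgets as placeholders (`Widget` is defined later in this diff; the title is arbitrary):

```py
from google.protobuf import empty_pb2
from google.cloud.monitoring_dashboard_v1.types import layouts, widget

placeholder = widget.Widget(title="placeholder", blank=empty_pb2.Empty())

layout = layouts.RowLayout(
    rows=[
        # A weight-2 row is rendered twice as tall as a weight-1 row.
        layouts.RowLayout.Row(weight=2, widgets=[placeholder]),
        layouts.RowLayout.Row(weight=1, widgets=[placeholder]),
    ],
)
```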
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.monitoring_dashboard_v1.types import common + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", + manifest={ + "SparkChartType", + "TimeSeriesQuery", + "TimeSeriesFilter", + "TimeSeriesFilterRatio", + "Threshold", + }, +) + + +class SparkChartType(proto.Enum): + r"""Defines the possible types of spark chart supported by the + ``Scorecard``. + """ + SPARK_CHART_TYPE_UNSPECIFIED = 0 + SPARK_LINE = 1 + SPARK_BAR = 2 + + +class TimeSeriesQuery(proto.Message): + r"""TimeSeriesQuery collects the set of supported methods for + querying time series data from the Stackdriver metrics API. + + Attributes: + time_series_filter (~.metrics.TimeSeriesFilter): + Filter parameters to fetch time series. + time_series_filter_ratio (~.metrics.TimeSeriesFilterRatio): + Parameters to fetch a ratio between two time + series filters. + time_series_query_language (str): + A query used to fetch time series. + unit_override (str): + The unit of data contained in fetched time series. If + non-empty, this unit will override any unit that accompanies + fetched data. The format is the same as the + ```unit`` `__ + field in ``MetricDescriptor``. + """ + + time_series_filter = proto.Field( + proto.MESSAGE, number=1, oneof="source", message="TimeSeriesFilter", + ) + + time_series_filter_ratio = proto.Field( + proto.MESSAGE, number=2, oneof="source", message="TimeSeriesFilterRatio", + ) + + time_series_query_language = proto.Field(proto.STRING, number=3, oneof="source") + + unit_override = proto.Field(proto.STRING, number=5) + + +class TimeSeriesFilter(proto.Message): + r"""A filter that defines a subset of time series data that is displayed + in a widget. Time series data is fetched using the + ```ListTimeSeries`` `__ + method. + + Attributes: + filter (str): + Required. The `monitoring + filter `__ + that identifies the metric types, resources, and projects to + query. + aggregation (~.common.Aggregation): + By default, the raw time series data is + returned. Use this field to combine multiple + time series for different views of the data. + secondary_aggregation (~.common.Aggregation): + Apply a second aggregation after ``aggregation`` is applied. + pick_time_series_filter (~.common.PickTimeSeriesFilter): + Ranking based time series filter. + statistical_time_series_filter (~.common.StatisticalTimeSeriesFilter): + Statistics based time series filter. + Note: This field is deprecated and completely + ignored by the API. 
+ """ + + filter = proto.Field(proto.STRING, number=1) + + aggregation = proto.Field(proto.MESSAGE, number=2, message=common.Aggregation,) + + secondary_aggregation = proto.Field( + proto.MESSAGE, number=3, message=common.Aggregation, + ) + + pick_time_series_filter = proto.Field( + proto.MESSAGE, + number=4, + oneof="output_filter", + message=common.PickTimeSeriesFilter, + ) + + statistical_time_series_filter = proto.Field( + proto.MESSAGE, + number=5, + oneof="output_filter", + message=common.StatisticalTimeSeriesFilter, + ) + + +class TimeSeriesFilterRatio(proto.Message): + r"""A pair of time series filters that define a ratio + computation. The output time series is the pair-wise division of + each aligned element from the numerator and denominator time + series. + + Attributes: + numerator (~.metrics.TimeSeriesFilterRatio.RatioPart): + The numerator of the ratio. + denominator (~.metrics.TimeSeriesFilterRatio.RatioPart): + The denominator of the ratio. + secondary_aggregation (~.common.Aggregation): + Apply a second aggregation after the ratio is + computed. + pick_time_series_filter (~.common.PickTimeSeriesFilter): + Ranking based time series filter. + statistical_time_series_filter (~.common.StatisticalTimeSeriesFilter): + Statistics based time series filter. + Note: This field is deprecated and completely + ignored by the API. + """ + + class RatioPart(proto.Message): + r"""Describes a query to build the numerator or denominator of a + TimeSeriesFilterRatio. + + Attributes: + filter (str): + Required. The `monitoring + filter `__ + that identifies the metric types, resources, and projects to + query. + aggregation (~.common.Aggregation): + By default, the raw time series data is + returned. Use this field to combine multiple + time series for different views of the data. + """ + + filter = proto.Field(proto.STRING, number=1) + + aggregation = proto.Field(proto.MESSAGE, number=2, message=common.Aggregation,) + + numerator = proto.Field(proto.MESSAGE, number=1, message=RatioPart,) + + denominator = proto.Field(proto.MESSAGE, number=2, message=RatioPart,) + + secondary_aggregation = proto.Field( + proto.MESSAGE, number=3, message=common.Aggregation, + ) + + pick_time_series_filter = proto.Field( + proto.MESSAGE, + number=4, + oneof="output_filter", + message=common.PickTimeSeriesFilter, + ) + + statistical_time_series_filter = proto.Field( + proto.MESSAGE, + number=5, + oneof="output_filter", + message=common.StatisticalTimeSeriesFilter, + ) + + +class Threshold(proto.Message): + r"""Defines a threshold for categorizing time series values. + + Attributes: + label (str): + A label for the threshold. + value (float): + The value of the threshold. The value should + be defined in the native scale of the metric. + color (~.metrics.Threshold.Color): + The state color for this threshold. Color is + not allowed in a XyChart. + direction (~.metrics.Threshold.Direction): + The direction for the current threshold. + Direction is not allowed in a XyChart. + """ + + class Color(proto.Enum): + r"""The color suggests an interpretation to the viewer when + actual values cross the threshold. Comments on each color + provide UX guidance on how users can be expected to interpret a + given state color. + """ + COLOR_UNSPECIFIED = 0 + YELLOW = 4 + RED = 6 + + class Direction(proto.Enum): + r"""Whether the threshold is considered crossed by an actual + value above or below its threshold value. 
+ """ + DIRECTION_UNSPECIFIED = 0 + ABOVE = 1 + BELOW = 2 + + label = proto.Field(proto.STRING, number=1) + + value = proto.Field(proto.DOUBLE, number=2) + + color = proto.Field(proto.ENUM, number=3, enum=Color,) + + direction = proto.Field(proto.ENUM, number=4, enum=Direction,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/monitoring_dashboard_v1/types/scorecard.py b/google/cloud/monitoring_dashboard_v1/types/scorecard.py new file mode 100644 index 0000000..420ab73 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/types/scorecard.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.monitoring_dashboard_v1.types import metrics +from google.protobuf import duration_pb2 as duration # type: ignore + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", manifest={"Scorecard",}, +) + + +class Scorecard(proto.Message): + r"""A widget showing the latest value of a metric, and how this + value relates to one or more thresholds. + + Attributes: + time_series_query (~.metrics.TimeSeriesQuery): + Required. Fields for querying time series + data from the Stackdriver metrics API. + gauge_view (~.scorecard.Scorecard.GaugeView): + Will cause the scorecard to show a gauge + chart. + spark_chart_view (~.scorecard.Scorecard.SparkChartView): + Will cause the scorecard to show a spark + chart. + thresholds (Sequence[~.metrics.Threshold]): + The thresholds used to determine the state of + the scorecard given the time series' current + value. For an actual value x, the scorecard is + in a danger state if x is less than or equal to + a danger threshold that triggers below, or + greater than or equal to a danger threshold that + triggers above. Similarly, if x is above/below a + warning threshold that triggers above/below, + then the scorecard is in a warning state - + unless x also puts it in a danger state. (Danger + trumps warning.) + As an example, consider a scorecard with the + following four thresholds: { + value: 90, + category: 'DANGER', + trigger: 'ABOVE', + }, + { + value: 70, + category: 'WARNING', + trigger: 'ABOVE', + }, + { + value: 10, + category: 'DANGER', + trigger: 'BELOW', + }, + { + value: 20, + category: 'WARNING', + trigger: 'BELOW', + } + + Then: values less than or equal to 10 would put + the scorecard in a DANGER state, values greater + than 10 but less than or equal to 20 a WARNING + state, values strictly between 20 and 70 an OK + state, values greater than or equal to 70 but + less than 90 a WARNING state, and values greater + than or equal to 90 a DANGER state. + """ + + class GaugeView(proto.Message): + r"""A gauge chart shows where the current value sits within a + pre-defined range. The upper and lower bounds should define the + possible range of values for the scorecard's query (inclusive). + + Attributes: + lower_bound (float): + The lower bound for this gauge chart. 
The + value of the chart should always be greater than + or equal to this. + upper_bound (float): + The upper bound for this gauge chart. The + value of the chart should always be less than or + equal to this. + """ + + lower_bound = proto.Field(proto.DOUBLE, number=1) + + upper_bound = proto.Field(proto.DOUBLE, number=2) + + class SparkChartView(proto.Message): + r"""A sparkChart is a small chart suitable for inclusion in a + table-cell or inline in text. This message contains the + configuration for a sparkChart to show up on a Scorecard, + showing recent trends of the scorecard's timeseries. + + Attributes: + spark_chart_type (~.metrics.SparkChartType): + Required. The type of sparkchart to show in + this chartView. + min_alignment_period (~.duration.Duration): + The lower bound on data point frequency in + the chart implemented by specifying the minimum + alignment period to use in a time series query. + For example, if the data is published once every + 10 minutes it would not make sense to fetch and + align data at one minute intervals. This field + is optional and exists only as a hint. + """ + + spark_chart_type = proto.Field( + proto.ENUM, number=1, enum=metrics.SparkChartType, + ) + + min_alignment_period = proto.Field( + proto.MESSAGE, number=2, message=duration.Duration, + ) + + time_series_query = proto.Field( + proto.MESSAGE, number=1, message=metrics.TimeSeriesQuery, + ) + + gauge_view = proto.Field( + proto.MESSAGE, number=4, oneof="data_view", message=GaugeView, + ) + + spark_chart_view = proto.Field( + proto.MESSAGE, number=5, oneof="data_view", message=SparkChartView, + ) + + thresholds = proto.RepeatedField( + proto.MESSAGE, number=6, message=metrics.Threshold, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/monitoring_dashboard/v1/proto/layouts_pb2_grpc.py b/google/cloud/monitoring_dashboard_v1/types/service.py similarity index 78% rename from google/cloud/monitoring_dashboard/v1/proto/layouts_pb2_grpc.py rename to google/cloud/monitoring_dashboard_v1/types/service.py index b662812..7c1c138 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/layouts_pb2_grpc.py +++ b/google/cloud/monitoring_dashboard_v1/types/service.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -11,7 +13,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +# + + +__protobuf__ = proto.module(package="google.monitoring.dashboard.v1", manifest={},) -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! -import grpc +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/monitoring_dashboard_v1/types/text.py b/google/cloud/monitoring_dashboard_v1/types/text.py new file mode 100644 index 0000000..df10f69 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/types/text.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", manifest={"Text",}, +) + + +class Text(proto.Message): + r"""A widget that displays textual content. + + Attributes: + content (str): + The text content to be displayed. + format (~.text.Text.Format): + How the text content is formatted. + """ + + class Format(proto.Enum): + r"""The format type of the text content.""" + FORMAT_UNSPECIFIED = 0 + MARKDOWN = 1 + RAW = 2 + + content = proto.Field(proto.STRING, number=1) + + format = proto.Field(proto.ENUM, number=2, enum=Format,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/monitoring_dashboard_v1/types/widget.py b/google/cloud/monitoring_dashboard_v1/types/widget.py new file mode 100644 index 0000000..8cd78e4 --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/types/widget.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.monitoring_dashboard_v1.types import scorecard as gmd_scorecard +from google.cloud.monitoring_dashboard_v1.types import text as gmd_text +from google.cloud.monitoring_dashboard_v1.types import xychart +from google.protobuf import empty_pb2 as empty # type: ignore + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", manifest={"Widget",}, +) + + +class Widget(proto.Message): + r"""Widget contains a single dashboard component and + configuration of how to present the component in the dashboard. + + Attributes: + title (str): + Optional. The title of the widget. + xy_chart (~.xychart.XyChart): + A chart of time series data. + scorecard (~.gmd_scorecard.Scorecard): + A scorecard summarizing time series data. + text (~.gmd_text.Text): + A raw string or markdown displaying textual + content. + blank (~.empty.Empty): + A blank space. + """ + + title = proto.Field(proto.STRING, number=1) + + xy_chart = proto.Field( + proto.MESSAGE, number=2, oneof="content", message=xychart.XyChart, + ) + + scorecard = proto.Field( + proto.MESSAGE, number=3, oneof="content", message=gmd_scorecard.Scorecard, + ) + + text = proto.Field(proto.MESSAGE, number=4, oneof="content", message=gmd_text.Text,) + + blank = proto.Field(proto.MESSAGE, number=5, oneof="content", message=empty.Empty,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/monitoring_dashboard_v1/types/xychart.py b/google/cloud/monitoring_dashboard_v1/types/xychart.py new file mode 100644 index 0000000..2b57f7c --- /dev/null +++ b/google/cloud/monitoring_dashboard_v1/types/xychart.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
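Because `xy_chart`, `scorecard`, `text`, and `blank` share the `content` oneof, a `Widget` carries exactly one of them; assigning a second variant clears the first. A small sketch of a markdown text widget (the strings are arbitrary):

```py
from google.cloud.monitoring_dashboard_v1.types import text, widget

note = widget.Widget(
    title="Runbook",
    text=text.Text(
        content="Escalation steps live in the team runbook.",
        format=text.Text.Format.MARKDOWN,
    ),
)
```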
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.cloud.monitoring_dashboard_v1.types import metrics +from google.protobuf import duration_pb2 as duration # type: ignore + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", manifest={"XyChart", "ChartOptions",}, +) + + +class XyChart(proto.Message): + r"""A chart that displays data on a 2D (X and Y axes) plane. + + Attributes: + data_sets (Sequence[~.xychart.XyChart.DataSet]): + Required. The data displayed in this chart. + timeshift_duration (~.duration.Duration): + The duration used to display a comparison + chart. A comparison chart simultaneously shows + values from two similar-length time periods + (e.g., week-over-week metrics). + The duration must be positive, and it can only + be applied to charts with data sets of LINE plot + type. + thresholds (Sequence[~.metrics.Threshold]): + Threshold lines drawn horizontally across the + chart. + x_axis (~.xychart.XyChart.Axis): + The properties applied to the X axis. + y_axis (~.xychart.XyChart.Axis): + The properties applied to the Y axis. + chart_options (~.xychart.ChartOptions): + Display options for the chart. + """ + + class DataSet(proto.Message): + r"""Groups a time series query definition with charting options. + + Attributes: + time_series_query (~.metrics.TimeSeriesQuery): + Required. Fields for querying time series + data from the Stackdriver metrics API. + plot_type (~.xychart.XyChart.DataSet.PlotType): + How this data should be plotted on the chart. + legend_template (str): + A template string for naming ``TimeSeries`` in the resulting + data set. This should be a string with interpolations of the + form ``${label_name}``, which will resolve to the label's + value. + min_alignment_period (~.duration.Duration): + Optional. The lower bound on data point frequency for this + data set, implemented by specifying the minimum alignment + period to use in a time series query For example, if the + data is published once every 10 minutes, the + ``min_alignment_period`` should be at least 10 minutes. It + would not make sense to fetch and align data at one minute + intervals. + """ + + class PlotType(proto.Enum): + r"""The types of plotting strategies for data sets.""" + PLOT_TYPE_UNSPECIFIED = 0 + LINE = 1 + STACKED_AREA = 2 + STACKED_BAR = 3 + HEATMAP = 4 + + time_series_query = proto.Field( + proto.MESSAGE, number=1, message=metrics.TimeSeriesQuery, + ) + + plot_type = proto.Field(proto.ENUM, number=2, enum="XyChart.DataSet.PlotType",) + + legend_template = proto.Field(proto.STRING, number=3) + + min_alignment_period = proto.Field( + proto.MESSAGE, number=4, message=duration.Duration, + ) + + class Axis(proto.Message): + r"""A chart axis. + + Attributes: + label (str): + The label of the axis. + scale (~.xychart.XyChart.Axis.Scale): + The axis scale. By default, a linear scale is + used. 
+ """ + + class Scale(proto.Enum): + r"""Types of scales used in axes.""" + SCALE_UNSPECIFIED = 0 + LINEAR = 1 + LOG10 = 2 + + label = proto.Field(proto.STRING, number=1) + + scale = proto.Field(proto.ENUM, number=2, enum="XyChart.Axis.Scale",) + + data_sets = proto.RepeatedField(proto.MESSAGE, number=1, message=DataSet,) + + timeshift_duration = proto.Field( + proto.MESSAGE, number=4, message=duration.Duration, + ) + + thresholds = proto.RepeatedField( + proto.MESSAGE, number=5, message=metrics.Threshold, + ) + + x_axis = proto.Field(proto.MESSAGE, number=6, message=Axis,) + + y_axis = proto.Field(proto.MESSAGE, number=7, message=Axis,) + + chart_options = proto.Field(proto.MESSAGE, number=8, message="ChartOptions",) + + +class ChartOptions(proto.Message): + r"""Options to control visual rendering of a chart. + + Attributes: + mode (~.xychart.ChartOptions.Mode): + The chart mode. + """ + + class Mode(proto.Enum): + r"""Chart mode options.""" + MODE_UNSPECIFIED = 0 + COLOR = 1 + X_RAY = 2 + STATS = 3 + + mode = proto.Field(proto.ENUM, number=1, enum=Mode,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/monitoring/dashboard/__init__.py b/google/monitoring/dashboard/__init__.py new file mode 100644 index 0000000..06c9061 --- /dev/null +++ b/google/monitoring/dashboard/__init__.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from google.monitoring.dashboard_v1.services.dashboards_service.async_client import ( + DashboardsServiceAsyncClient, +) +from google.monitoring.dashboard_v1.services.dashboards_service.client import ( + DashboardsServiceClient, +) +from google.monitoring.dashboard_v1.types.common import Aggregation +from google.monitoring.dashboard_v1.types.common import PickTimeSeriesFilter +from google.monitoring.dashboard_v1.types.common import StatisticalTimeSeriesFilter +from google.monitoring.dashboard_v1.types.dashboard import Dashboard +from google.monitoring.dashboard_v1.types.dashboards_service import ( + CreateDashboardRequest, +) +from google.monitoring.dashboard_v1.types.dashboards_service import ( + DeleteDashboardRequest, +) +from google.monitoring.dashboard_v1.types.dashboards_service import GetDashboardRequest +from google.monitoring.dashboard_v1.types.dashboards_service import ( + ListDashboardsRequest, +) +from google.monitoring.dashboard_v1.types.dashboards_service import ( + ListDashboardsResponse, +) +from google.monitoring.dashboard_v1.types.dashboards_service import ( + UpdateDashboardRequest, +) +from google.monitoring.dashboard_v1.types.layouts import ColumnLayout +from google.monitoring.dashboard_v1.types.layouts import GridLayout +from google.monitoring.dashboard_v1.types.layouts import RowLayout +from google.monitoring.dashboard_v1.types.metrics import SparkChartType +from google.monitoring.dashboard_v1.types.metrics import Threshold +from google.monitoring.dashboard_v1.types.metrics import TimeSeriesFilter +from google.monitoring.dashboard_v1.types.metrics import TimeSeriesFilterRatio +from google.monitoring.dashboard_v1.types.metrics import TimeSeriesQuery +from google.monitoring.dashboard_v1.types.scorecard import Scorecard +from google.monitoring.dashboard_v1.types.text import Text +from google.monitoring.dashboard_v1.types.widget import Widget +from google.monitoring.dashboard_v1.types.xychart import ChartOptions +from google.monitoring.dashboard_v1.types.xychart import XyChart + +__all__ = ( + "Aggregation", + "ChartOptions", + "ColumnLayout", + "CreateDashboardRequest", + "Dashboard", + "DashboardsServiceAsyncClient", + "DashboardsServiceClient", + "DeleteDashboardRequest", + "GetDashboardRequest", + "GridLayout", + "ListDashboardsRequest", + "ListDashboardsResponse", + "PickTimeSeriesFilter", + "RowLayout", + "Scorecard", + "SparkChartType", + "StatisticalTimeSeriesFilter", + "Text", + "Threshold", + "TimeSeriesFilter", + "TimeSeriesFilterRatio", + "TimeSeriesQuery", + "UpdateDashboardRequest", + "Widget", + "XyChart", +) diff --git a/google/monitoring/dashboard/py.typed b/google/monitoring/dashboard/py.typed new file mode 100644 index 0000000..a52708e --- /dev/null +++ b/google/monitoring/dashboard/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-monitoring-dashboard package uses inline types. diff --git a/google/monitoring/dashboard_v1/__init__.py b/google/monitoring/dashboard_v1/__init__.py new file mode 100644 index 0000000..4c970ee --- /dev/null +++ b/google/monitoring/dashboard_v1/__init__.py @@ -0,0 +1,69 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.dashboards_service import DashboardsServiceClient +from .types.common import Aggregation +from .types.common import PickTimeSeriesFilter +from .types.common import StatisticalTimeSeriesFilter +from .types.dashboard import Dashboard +from .types.dashboards_service import CreateDashboardRequest +from .types.dashboards_service import DeleteDashboardRequest +from .types.dashboards_service import GetDashboardRequest +from .types.dashboards_service import ListDashboardsRequest +from .types.dashboards_service import ListDashboardsResponse +from .types.dashboards_service import UpdateDashboardRequest +from .types.layouts import ColumnLayout +from .types.layouts import GridLayout +from .types.layouts import RowLayout +from .types.metrics import SparkChartType +from .types.metrics import Threshold +from .types.metrics import TimeSeriesFilter +from .types.metrics import TimeSeriesFilterRatio +from .types.metrics import TimeSeriesQuery +from .types.scorecard import Scorecard +from .types.text import Text +from .types.widget import Widget +from .types.xychart import ChartOptions +from .types.xychart import XyChart + + +__all__ = ( + "Aggregation", + "ChartOptions", + "ColumnLayout", + "CreateDashboardRequest", + "Dashboard", + "DeleteDashboardRequest", + "GetDashboardRequest", + "GridLayout", + "ListDashboardsRequest", + "ListDashboardsResponse", + "PickTimeSeriesFilter", + "RowLayout", + "Scorecard", + "SparkChartType", + "StatisticalTimeSeriesFilter", + "Text", + "Threshold", + "TimeSeriesFilter", + "TimeSeriesFilterRatio", + "TimeSeriesQuery", + "UpdateDashboardRequest", + "Widget", + "XyChart", + "DashboardsServiceClient", +) diff --git a/google/monitoring/dashboard_v1/py.typed b/google/monitoring/dashboard_v1/py.typed new file mode 100644 index 0000000..a52708e --- /dev/null +++ b/google/monitoring/dashboard_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-monitoring-dashboard package uses inline types. diff --git a/google/cloud/monitoring_dashboard/v1/proto/common_pb2_grpc.py b/google/monitoring/dashboard_v1/services/__init__.py similarity index 87% rename from google/cloud/monitoring_dashboard/v1/proto/common_pb2_grpc.py rename to google/monitoring/dashboard_v1/services/__init__.py index b662812..42ffdf2 100644 --- a/google/cloud/monitoring_dashboard/v1/proto/common_pb2_grpc.py +++ b/google/monitoring/dashboard_v1/services/__init__.py @@ -1,3 +1,5 @@ +# -*- coding: utf-8 -*- + # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); @@ -11,7 +13,4 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
- -import grpc +# diff --git a/google/monitoring/dashboard_v1/services/dashboards_service/__init__.py b/google/monitoring/dashboard_v1/services/dashboards_service/__init__.py new file mode 100644 index 0000000..ad6c65c --- /dev/null +++ b/google/monitoring/dashboard_v1/services/dashboards_service/__init__.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .client import DashboardsServiceClient +from .async_client import DashboardsServiceAsyncClient + +__all__ = ( + "DashboardsServiceClient", + "DashboardsServiceAsyncClient", +) diff --git a/google/monitoring/dashboard_v1/services/dashboards_service/async_client.py b/google/monitoring/dashboard_v1/services/dashboards_service/async_client.py new file mode 100644 index 0000000..01d02ba --- /dev/null +++ b/google/monitoring/dashboard_v1/services/dashboards_service/async_client.py @@ -0,0 +1,391 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.monitoring.dashboard_v1.services.dashboards_service import pagers +from google.monitoring.dashboard_v1.types import dashboard +from google.monitoring.dashboard_v1.types import dashboards_service +from google.monitoring.dashboard_v1.types import layouts + +from .transports.base import DashboardsServiceTransport +from .transports.grpc_asyncio import DashboardsServiceGrpcAsyncIOTransport +from .client import DashboardsServiceClient + + +class DashboardsServiceAsyncClient: + """Manages Stackdriver dashboards. A dashboard is an arrangement + of data display widgets in a specific layout. 
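+
+    A minimal usage sketch (illustrative only, not generated code; the
+    project and dashboard ids below are placeholders and Application
+    Default Credentials are assumed to be available)::
+
+        client = DashboardsServiceAsyncClient()
+        dashboard = await client.get_dashboard(
+            request={"name": "projects/my-project/dashboards/my-dashboard"}
+        )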
+ """ + + _client: DashboardsServiceClient + + DEFAULT_ENDPOINT = DashboardsServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DashboardsServiceClient.DEFAULT_MTLS_ENDPOINT + + dashboard_path = staticmethod(DashboardsServiceClient.dashboard_path) + + from_service_account_file = DashboardsServiceClient.from_service_account_file + from_service_account_json = from_service_account_file + + get_transport_class = functools.partial( + type(DashboardsServiceClient).get_transport_class, type(DashboardsServiceClient) + ) + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, DashboardsServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + ) -> None: + """Instantiate the dashboards service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DashboardsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + + self._client = DashboardsServiceClient( + credentials=credentials, transport=transport, client_options=client_options, + ) + + async def create_dashboard( + self, + request: dashboards_service.CreateDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dashboard.Dashboard: + r"""Creates a new custom dashboard. + + This method requires the ``monitoring.dashboards.create`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.CreateDashboardRequest`): + The request object. The `CreateDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dashboard.Dashboard: + A Google Stackdriver dashboard. + Dashboards define the content and layout + of pages in the Stackdriver web + application. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.CreateDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
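+        # The wrapper applies the method's default timeout and client-info
+        # metadata; an explicit ``retry`` or ``timeout`` passed by the caller
+        # takes precedence over those defaults.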
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_dashboard, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def list_dashboards( + self, + request: dashboards_service.ListDashboardsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDashboardsAsyncPager: + r"""Lists the existing dashboards. + + This method requires the ``monitoring.dashboards.list`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.ListDashboardsRequest`): + The request object. The `ListDashboards` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDashboardsAsyncPager: + The ``ListDashboards`` request. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.ListDashboardsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_dashboards, + default_timeout=None, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDashboardsAsyncPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_dashboard( + self, + request: dashboards_service.GetDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dashboard.Dashboard: + r"""Fetches a specific dashboard. + + This method requires the ``monitoring.dashboards.get`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.GetDashboardRequest`): + The request object. The `GetDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dashboard.Dashboard: + A Google Stackdriver dashboard. + Dashboards define the content and layout + of pages in the Stackdriver web + application. + + """ + # Create or coerce a protobuf request object. 
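+        # The constructor accepts either an existing ``GetDashboardRequest``
+        # or a plain mapping of its fields (for example
+        # ``{"name": "projects/.../dashboards/..."}``) and coerces it into a
+        # request message.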
+ + request = dashboards_service.GetDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_dashboard, + default_timeout=None, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + async def delete_dashboard( + self, + request: dashboards_service.DeleteDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing custom dashboard. + + This method requires the ``monitoring.dashboards.delete`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.DeleteDashboardRequest`): + The request object. The `DeleteDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.DeleteDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_dashboard, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + await rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + async def update_dashboard( + self, + request: dashboards_service.UpdateDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dashboard.Dashboard: + r"""Replaces an existing custom dashboard with a new definition. + + This method requires the ``monitoring.dashboards.update`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.UpdateDashboardRequest`): + The request object. The `UpdateDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dashboard.Dashboard: + A Google Stackdriver dashboard. + Dashboards define the content and layout + of pages in the Stackdriver web + application. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.UpdateDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
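+        # For this RPC the routing parameter is the nested ``dashboard.name``
+        # field, so the ``Dashboard`` embedded in the request must carry the
+        # full resource name of the dashboard being replaced.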
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.update_dashboard, + default_timeout=30.0, + client_info=_client_info, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dashboard.name", request.dashboard.name),) + ), + ) + + # Send the request. + response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-monitoring-dashboard", + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DashboardsServiceAsyncClient",) diff --git a/google/monitoring/dashboard_v1/services/dashboards_service/client.py b/google/monitoring/dashboard_v1/services/dashboards_service/client.py new file mode 100644 index 0000000..8bfee70 --- /dev/null +++ b/google/monitoring/dashboard_v1/services/dashboards_service/client.py @@ -0,0 +1,510 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +import os +import re +from typing import Callable, Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.monitoring.dashboard_v1.services.dashboards_service import pagers +from google.monitoring.dashboard_v1.types import dashboard +from google.monitoring.dashboard_v1.types import dashboards_service +from google.monitoring.dashboard_v1.types import layouts + +from .transports.base import DashboardsServiceTransport +from .transports.grpc import DashboardsServiceGrpcTransport +from .transports.grpc_asyncio import DashboardsServiceGrpcAsyncIOTransport + + +class DashboardsServiceClientMeta(type): + """Metaclass for the DashboardsService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = ( + OrderedDict() + ) # type: Dict[str, Type[DashboardsServiceTransport]] + _transport_registry["grpc"] = DashboardsServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DashboardsServiceGrpcAsyncIOTransport + + def get_transport_class( + cls, label: str = None, + ) -> Type[DashboardsServiceTransport]: + """Return an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DashboardsServiceClient(metaclass=DashboardsServiceClientMeta): + """Manages Stackdriver dashboards. A dashboard is an arrangement + of data display widgets in a specific layout. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Convert api endpoint to mTLS endpoint. + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "monitoring.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + {@api.name}: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @staticmethod + def dashboard_path(project: str, dashboard: str,) -> str: + """Return a fully-qualified dashboard string.""" + return "projects/{project}/dashboards/{dashboard}".format( + project=project, dashboard=dashboard, + ) + + @staticmethod + def parse_dashboard_path(path: str) -> Dict[str, str]: + """Parse a dashboard path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/dashboards/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__( + self, + *, + credentials: credentials.Credentials = None, + transport: Union[str, DashboardsServiceTransport] = None, + client_options: ClientOptions = None, + ) -> None: + """Instantiate the dashboards service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.DashboardsServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. 
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint, this is the default value for + the environment variable) and "auto" (auto switch to the default + mTLS endpoint if client SSL credentials is present). However, + the ``api_endpoint`` property takes precedence if provided. + (2) The ``client_cert_source`` property is used to provide client + SSL credentials for mutual TLS transport. If not provided, the + default SSL credentials will be used if present. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = ClientOptions.from_dict(client_options) + if client_options is None: + client_options = ClientOptions.ClientOptions() + + if client_options.api_endpoint is None: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never") + if use_mtls_env == "never": + client_options.api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + has_client_cert_source = ( + client_options.client_cert_source is not None + or mtls.has_default_client_cert_source() + ) + client_options.api_endpoint = ( + self.DEFAULT_MTLS_ENDPOINT + if has_client_cert_source + else self.DEFAULT_ENDPOINT + ) + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, DashboardsServiceTransport): + # transport is a DashboardsServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if client_options.scopes: + raise ValueError( + "When providing a transport instance, " + "provide its scopes directly." + ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=client_options.api_endpoint, + scopes=client_options.scopes, + api_mtls_endpoint=client_options.api_endpoint, + client_cert_source=client_options.client_cert_source, + quota_project_id=client_options.quota_project_id, + ) + + def create_dashboard( + self, + request: dashboards_service.CreateDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dashboard.Dashboard: + r"""Creates a new custom dashboard. + + This method requires the ``monitoring.dashboards.create`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.CreateDashboardRequest`): + The request object. The `CreateDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ + Returns: + ~.dashboard.Dashboard: + A Google Stackdriver dashboard. + Dashboards define the content and layout + of pages in the Stackdriver web + application. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.CreateDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_dashboard] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def list_dashboards( + self, + request: dashboards_service.ListDashboardsRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListDashboardsPager: + r"""Lists the existing dashboards. + + This method requires the ``monitoring.dashboards.list`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.ListDashboardsRequest`): + The request object. The `ListDashboards` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.pagers.ListDashboardsPager: + The ``ListDashboards`` request. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.ListDashboardsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_dashboards] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDashboardsPager( + method=rpc, request=request, response=response, metadata=metadata, + ) + + # Done; return the response. + return response + + def get_dashboard( + self, + request: dashboards_service.GetDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dashboard.Dashboard: + r"""Fetches a specific dashboard. + + This method requires the ``monitoring.dashboards.get`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.GetDashboardRequest`): + The request object. The `GetDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dashboard.Dashboard: + A Google Stackdriver dashboard. 
+ Dashboards define the content and layout + of pages in the Stackdriver web + application. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.GetDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_dashboard] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + def delete_dashboard( + self, + request: dashboards_service.DeleteDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes an existing custom dashboard. + + This method requires the ``monitoring.dashboards.delete`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.DeleteDashboardRequest`): + The request object. The `DeleteDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.DeleteDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_dashboard] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Send the request. + rpc( + request, retry=retry, timeout=timeout, metadata=metadata, + ) + + def update_dashboard( + self, + request: dashboards_service.UpdateDashboardRequest = None, + *, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> dashboard.Dashboard: + r"""Replaces an existing custom dashboard with a new definition. + + This method requires the ``monitoring.dashboards.update`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Args: + request (:class:`~.dashboards_service.UpdateDashboardRequest`): + The request object. The `UpdateDashboard` request. + + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + ~.dashboard.Dashboard: + A Google Stackdriver dashboard. + Dashboards define the content and layout + of pages in the Stackdriver web + application. + + """ + # Create or coerce a protobuf request object. + + request = dashboards_service.UpdateDashboardRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_dashboard] + + # Certain fields should be provided within the metadata header; + # add these here. 
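+        # ``to_grpc_metadata`` serializes the routing parameters into the
+        # ``x-goog-request-params`` header, which the backend uses to route
+        # the request.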
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("dashboard.name", request.dashboard.name),) + ), + ) + + # Send the request. + response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + + # Done; return the response. + return response + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-monitoring-dashboard", + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +__all__ = ("DashboardsServiceClient",) diff --git a/google/monitoring/dashboard_v1/services/dashboards_service/pagers.py b/google/monitoring/dashboard_v1/services/dashboards_service/pagers.py new file mode 100644 index 0000000..fbf9ea4 --- /dev/null +++ b/google/monitoring/dashboard_v1/services/dashboards_service/pagers.py @@ -0,0 +1,149 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple + +from google.monitoring.dashboard_v1.types import dashboard +from google.monitoring.dashboard_v1.types import dashboards_service + + +class ListDashboardsPager: + """A pager for iterating through ``list_dashboards`` requests. + + This class thinly wraps an initial + :class:`~.dashboards_service.ListDashboardsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``dashboards`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDashboards`` requests and continue to iterate + through the ``dashboards`` field on the + corresponding responses. + + All the usual :class:`~.dashboards_service.ListDashboardsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., dashboards_service.ListDashboardsResponse], + request: dashboards_service.ListDashboardsRequest, + response: dashboards_service.ListDashboardsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.dashboards_service.ListDashboardsRequest`): + The initial request object. + response (:class:`~.dashboards_service.ListDashboardsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
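+
+        A minimal sketch of how the pager is typically consumed (illustrative
+        only; ``client`` and ``project_id`` are assumed to already exist)::
+
+            pager = client.list_dashboards(
+                request={"parent": f"projects/{project_id}"}
+            )
+            for dash in pager:  # additional pages are fetched transparently
+                print(dash.display_name)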
+ """ + self._method = method + self._request = dashboards_service.ListDashboardsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[dashboards_service.ListDashboardsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[dashboard.Dashboard]: + for page in self.pages: + yield from page.dashboards + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDashboardsAsyncPager: + """A pager for iterating through ``list_dashboards`` requests. + + This class thinly wraps an initial + :class:`~.dashboards_service.ListDashboardsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``dashboards`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDashboards`` requests and continue to iterate + through the ``dashboards`` field on the + corresponding responses. + + All the usual :class:`~.dashboards_service.ListDashboardsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[dashboards_service.ListDashboardsResponse]], + request: dashboards_service.ListDashboardsRequest, + response: dashboards_service.ListDashboardsResponse, + *, + metadata: Sequence[Tuple[str, str]] = () + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (:class:`~.dashboards_service.ListDashboardsRequest`): + The initial request object. + response (:class:`~.dashboards_service.ListDashboardsResponse`): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
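+
+        The asynchronous counterpart (illustrative only; ``client`` is assumed
+        to be a ``DashboardsServiceAsyncClient``)::
+
+            pager = await client.list_dashboards(
+                request={"parent": f"projects/{project_id}"}
+            )
+            async for dash in pager:  # further pages are awaited transparently
+                print(dash.display_name)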
+ """ + self._method = method + self._request = dashboards_service.ListDashboardsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[dashboards_service.ListDashboardsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[dashboard.Dashboard]: + async def async_generator(): + async for page in self.pages: + for response in page.dashboards: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/monitoring/dashboard_v1/services/dashboards_service/transports/__init__.py b/google/monitoring/dashboard_v1/services/dashboards_service/transports/__init__.py new file mode 100644 index 0000000..7057d44 --- /dev/null +++ b/google/monitoring/dashboard_v1/services/dashboards_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from collections import OrderedDict +from typing import Dict, Type + +from .base import DashboardsServiceTransport +from .grpc import DashboardsServiceGrpcTransport +from .grpc_asyncio import DashboardsServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[DashboardsServiceTransport]] +_transport_registry["grpc"] = DashboardsServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DashboardsServiceGrpcAsyncIOTransport + + +__all__ = ( + "DashboardsServiceTransport", + "DashboardsServiceGrpcTransport", + "DashboardsServiceGrpcAsyncIOTransport", +) diff --git a/google/monitoring/dashboard_v1/services/dashboards_service/transports/base.py b/google/monitoring/dashboard_v1/services/dashboards_service/transports/base.py new file mode 100644 index 0000000..8c1c3cb --- /dev/null +++ b/google/monitoring/dashboard_v1/services/dashboards_service/transports/base.py @@ -0,0 +1,175 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import abc +import typing +import pkg_resources + +from google import auth +from google.api_core import exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.auth import credentials # type: ignore + +from google.monitoring.dashboard_v1.types import dashboard +from google.monitoring.dashboard_v1.types import dashboards_service +from google.protobuf import empty_pb2 as empty # type: ignore + + +try: + _client_info = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-monitoring-dashboard", + ).version, + ) +except pkg_resources.DistributionNotFound: + _client_info = gapic_v1.client_info.ClientInfo() + + +class DashboardsServiceTransport(abc.ABC): + """Abstract transport class for DashboardsService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/monitoring", + "https://www.googleapis.com/auth/monitoring.read", + "https://www.googleapis.com/auth/monitoring.write", + ) + + def __init__( + self, + *, + host: str = "monitoring.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: typing.Optional[str] = None, + scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES, + quota_project_id: typing.Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scope (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = auth.load_credentials_from_file( + credentials_file, scopes=scopes, quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = auth.default( + scopes=scopes, quota_project_id=quota_project_id + ) + + # Save the credentials. + self._credentials = credentials + + # Lifted into its own function so it can be stubbed out during tests. + self._prep_wrapped_messages() + + def _prep_wrapped_messages(self): + # Precompute the wrapped methods. 
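+        # Wrapping happens once, at transport construction time; the
+        # synchronous client then looks the callables up in
+        # ``_wrapped_methods`` so the same defaults apply on every call.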
+ self._wrapped_methods = { + self.create_dashboard: gapic_v1.method.wrap_method( + self.create_dashboard, default_timeout=30.0, client_info=_client_info, + ), + self.list_dashboards: gapic_v1.method.wrap_method( + self.list_dashboards, default_timeout=None, client_info=_client_info, + ), + self.get_dashboard: gapic_v1.method.wrap_method( + self.get_dashboard, default_timeout=None, client_info=_client_info, + ), + self.delete_dashboard: gapic_v1.method.wrap_method( + self.delete_dashboard, default_timeout=30.0, client_info=_client_info, + ), + self.update_dashboard: gapic_v1.method.wrap_method( + self.update_dashboard, default_timeout=30.0, client_info=_client_info, + ), + } + + @property + def create_dashboard( + self, + ) -> typing.Callable[ + [dashboards_service.CreateDashboardRequest], + typing.Union[dashboard.Dashboard, typing.Awaitable[dashboard.Dashboard]], + ]: + raise NotImplementedError() + + @property + def list_dashboards( + self, + ) -> typing.Callable[ + [dashboards_service.ListDashboardsRequest], + typing.Union[ + dashboards_service.ListDashboardsResponse, + typing.Awaitable[dashboards_service.ListDashboardsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_dashboard( + self, + ) -> typing.Callable[ + [dashboards_service.GetDashboardRequest], + typing.Union[dashboard.Dashboard, typing.Awaitable[dashboard.Dashboard]], + ]: + raise NotImplementedError() + + @property + def delete_dashboard( + self, + ) -> typing.Callable[ + [dashboards_service.DeleteDashboardRequest], + typing.Union[empty.Empty, typing.Awaitable[empty.Empty]], + ]: + raise NotImplementedError() + + @property + def update_dashboard( + self, + ) -> typing.Callable[ + [dashboards_service.UpdateDashboardRequest], + typing.Union[dashboard.Dashboard, typing.Awaitable[dashboard.Dashboard]], + ]: + raise NotImplementedError() + + +__all__ = ("DashboardsServiceTransport",) diff --git a/google/monitoring/dashboard_v1/services/dashboards_service/transports/grpc.py b/google/monitoring/dashboard_v1/services/dashboards_service/transports/grpc.py new file mode 100644 index 0000000..b1b3c55 --- /dev/null +++ b/google/monitoring/dashboard_v1/services/dashboards_service/transports/grpc.py @@ -0,0 +1,363 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers # type: ignore +from google import auth # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + + +import grpc # type: ignore + +from google.monitoring.dashboard_v1.types import dashboard +from google.monitoring.dashboard_v1.types import dashboards_service +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import DashboardsServiceTransport + + +class DashboardsServiceGrpcTransport(DashboardsServiceTransport): + """gRPC backend transport for DashboardsService. 
+ + Manages Stackdriver dashboards. A dashboard is an arrangement + of data display widgets in a specific layout. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "monitoring.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + if credentials is None: + credentials, _ = auth.default( + scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. 
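+            # The mutual TLS channel is built from the ``ssl_credentials`` assembled
+            # above (either from ``client_cert_source`` or application-default SSL
+            # credentials); if no ``scopes`` were passed, ``AUTH_SCOPES`` is used.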
+ self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} # type: Dict[str, Callable] + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + @classmethod + def create_channel( + cls, + host: str = "monitoring.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + address (Optionsl[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def create_dashboard( + self, + ) -> Callable[[dashboards_service.CreateDashboardRequest], dashboard.Dashboard]: + r"""Return a callable for the create dashboard method over gRPC. + + Creates a new custom dashboard. + + This method requires the ``monitoring.dashboards.create`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.CreateDashboardRequest], + ~.Dashboard]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
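+        # The stub is created lazily on first access and cached in ``self._stubs``
+        # so that subsequent property lookups reuse the same gRPC callable.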
+ if "create_dashboard" not in self._stubs: + self._stubs["create_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/CreateDashboard", + request_serializer=dashboards_service.CreateDashboardRequest.serialize, + response_deserializer=dashboard.Dashboard.deserialize, + ) + return self._stubs["create_dashboard"] + + @property + def list_dashboards( + self, + ) -> Callable[ + [dashboards_service.ListDashboardsRequest], + dashboards_service.ListDashboardsResponse, + ]: + r"""Return a callable for the list dashboards method over gRPC. + + Lists the existing dashboards. + + This method requires the ``monitoring.dashboards.list`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.ListDashboardsRequest], + ~.ListDashboardsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_dashboards" not in self._stubs: + self._stubs["list_dashboards"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/ListDashboards", + request_serializer=dashboards_service.ListDashboardsRequest.serialize, + response_deserializer=dashboards_service.ListDashboardsResponse.deserialize, + ) + return self._stubs["list_dashboards"] + + @property + def get_dashboard( + self, + ) -> Callable[[dashboards_service.GetDashboardRequest], dashboard.Dashboard]: + r"""Return a callable for the get dashboard method over gRPC. + + Fetches a specific dashboard. + + This method requires the ``monitoring.dashboards.get`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.GetDashboardRequest], + ~.Dashboard]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dashboard" not in self._stubs: + self._stubs["get_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/GetDashboard", + request_serializer=dashboards_service.GetDashboardRequest.serialize, + response_deserializer=dashboard.Dashboard.deserialize, + ) + return self._stubs["get_dashboard"] + + @property + def delete_dashboard( + self, + ) -> Callable[[dashboards_service.DeleteDashboardRequest], empty.Empty]: + r"""Return a callable for the delete dashboard method over gRPC. + + Deletes an existing custom dashboard. + + This method requires the ``monitoring.dashboards.delete`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.DeleteDashboardRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
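+        # DeleteDashboard returns ``google.protobuf.Empty``, a plain protobuf
+        # message rather than a proto-plus type, so the raw ``Empty.FromString``
+        # deserializer is used here instead of a ``.deserialize`` helper.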
+ if "delete_dashboard" not in self._stubs: + self._stubs["delete_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/DeleteDashboard", + request_serializer=dashboards_service.DeleteDashboardRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_dashboard"] + + @property + def update_dashboard( + self, + ) -> Callable[[dashboards_service.UpdateDashboardRequest], dashboard.Dashboard]: + r"""Return a callable for the update dashboard method over gRPC. + + Replaces an existing custom dashboard with a new definition. + + This method requires the ``monitoring.dashboards.update`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.UpdateDashboardRequest], + ~.Dashboard]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_dashboard" not in self._stubs: + self._stubs["update_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/UpdateDashboard", + request_serializer=dashboards_service.UpdateDashboardRequest.serialize, + response_deserializer=dashboard.Dashboard.deserialize, + ) + return self._stubs["update_dashboard"] + + +__all__ = ("DashboardsServiceGrpcTransport",) diff --git a/google/monitoring/dashboard_v1/services/dashboards_service/transports/grpc_asyncio.py b/google/monitoring/dashboard_v1/services/dashboards_service/transports/grpc_asyncio.py new file mode 100644 index 0000000..5d71665 --- /dev/null +++ b/google/monitoring/dashboard_v1/services/dashboards_service/transports/grpc_asyncio.py @@ -0,0 +1,362 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple + +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.monitoring.dashboard_v1.types import dashboard +from google.monitoring.dashboard_v1.types import dashboards_service +from google.protobuf import empty_pb2 as empty # type: ignore + +from .base import DashboardsServiceTransport +from .grpc import DashboardsServiceGrpcTransport + + +class DashboardsServiceGrpcAsyncIOTransport(DashboardsServiceTransport): + """gRPC AsyncIO backend transport for DashboardsService. + + Manages Stackdriver dashboards. A dashboard is an arrangement + of data display widgets in a specific layout. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
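+
+    Unlike the synchronous transport in ``grpc.py``, every RPC callable on this
+    transport returns an awaitable and is expected to be awaited from within an
+    asyncio event loop.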
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "monitoring.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + address (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + scopes = scopes or cls.AUTH_SCOPES + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "monitoring.googleapis.com", + credentials: credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If + provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A + callback to provide client SSL certificate bytes and private key + bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` + is None. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + if channel: + # Sanity check: Ensure that channel and credentials are not both + # provided. + credentials = False + + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + elif api_mtls_endpoint: + host = ( + api_mtls_endpoint + if ":" in api_mtls_endpoint + else api_mtls_endpoint + ":443" + ) + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + ssl_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + ssl_credentials = SslCredentials().ssl_credentials + + # create a new channel. The provided one is ignored. + self._grpc_channel = type(self).create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + ssl_credentials=ssl_credentials, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + # Run the base constructor. + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes or self.AUTH_SCOPES, + quota_project_id=quota_project_id, + ) + + self._stubs = {} + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Sanity check: Only create a new channel if we do not already + # have one. + if not hasattr(self, "_grpc_channel"): + self._grpc_channel = self.create_channel( + self._host, credentials=self._credentials, + ) + + # Return the channel from cache. + return self._grpc_channel + + @property + def create_dashboard( + self, + ) -> Callable[ + [dashboards_service.CreateDashboardRequest], Awaitable[dashboard.Dashboard] + ]: + r"""Return a callable for the create dashboard method over gRPC. + + Creates a new custom dashboard. + + This method requires the ``monitoring.dashboards.create`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.CreateDashboardRequest], + Awaitable[~.Dashboard]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_dashboard" not in self._stubs: + self._stubs["create_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/CreateDashboard", + request_serializer=dashboards_service.CreateDashboardRequest.serialize, + response_deserializer=dashboard.Dashboard.deserialize, + ) + return self._stubs["create_dashboard"] + + @property + def list_dashboards( + self, + ) -> Callable[ + [dashboards_service.ListDashboardsRequest], + Awaitable[dashboards_service.ListDashboardsResponse], + ]: + r"""Return a callable for the list dashboards method over gRPC. + + Lists the existing dashboards. + + This method requires the ``monitoring.dashboards.list`` + permission on the specified project. For more information, see + `Google Cloud IAM `__. 
+ + Returns: + Callable[[~.ListDashboardsRequest], + Awaitable[~.ListDashboardsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_dashboards" not in self._stubs: + self._stubs["list_dashboards"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/ListDashboards", + request_serializer=dashboards_service.ListDashboardsRequest.serialize, + response_deserializer=dashboards_service.ListDashboardsResponse.deserialize, + ) + return self._stubs["list_dashboards"] + + @property + def get_dashboard( + self, + ) -> Callable[ + [dashboards_service.GetDashboardRequest], Awaitable[dashboard.Dashboard] + ]: + r"""Return a callable for the get dashboard method over gRPC. + + Fetches a specific dashboard. + + This method requires the ``monitoring.dashboards.get`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.GetDashboardRequest], + Awaitable[~.Dashboard]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_dashboard" not in self._stubs: + self._stubs["get_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/GetDashboard", + request_serializer=dashboards_service.GetDashboardRequest.serialize, + response_deserializer=dashboard.Dashboard.deserialize, + ) + return self._stubs["get_dashboard"] + + @property + def delete_dashboard( + self, + ) -> Callable[[dashboards_service.DeleteDashboardRequest], Awaitable[empty.Empty]]: + r"""Return a callable for the delete dashboard method over gRPC. + + Deletes an existing custom dashboard. + + This method requires the ``monitoring.dashboards.delete`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.DeleteDashboardRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_dashboard" not in self._stubs: + self._stubs["delete_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/DeleteDashboard", + request_serializer=dashboards_service.DeleteDashboardRequest.serialize, + response_deserializer=empty.Empty.FromString, + ) + return self._stubs["delete_dashboard"] + + @property + def update_dashboard( + self, + ) -> Callable[ + [dashboards_service.UpdateDashboardRequest], Awaitable[dashboard.Dashboard] + ]: + r"""Return a callable for the update dashboard method over gRPC. + + Replaces an existing custom dashboard with a new definition. + + This method requires the ``monitoring.dashboards.update`` + permission on the specified dashboard. For more information, see + `Google Cloud IAM `__. + + Returns: + Callable[[~.UpdateDashboardRequest], + Awaitable[~.Dashboard]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_dashboard" not in self._stubs: + self._stubs["update_dashboard"] = self.grpc_channel.unary_unary( + "/google.monitoring.dashboard.v1.DashboardsService/UpdateDashboard", + request_serializer=dashboards_service.UpdateDashboardRequest.serialize, + response_deserializer=dashboard.Dashboard.deserialize, + ) + return self._stubs["update_dashboard"] + + +__all__ = ("DashboardsServiceGrpcAsyncIOTransport",) diff --git a/google/monitoring/dashboard_v1/types/__init__.py b/google/monitoring/dashboard_v1/types/__init__.py new file mode 100644 index 0000000..c963fe0 --- /dev/null +++ b/google/monitoring/dashboard_v1/types/__init__.py @@ -0,0 +1,75 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .common import ( + Aggregation, + PickTimeSeriesFilter, + StatisticalTimeSeriesFilter, +) +from .metrics import ( + TimeSeriesQuery, + TimeSeriesFilter, + TimeSeriesFilterRatio, + Threshold, +) +from .scorecard import Scorecard +from .text import Text +from .xychart import ( + XyChart, + ChartOptions, +) +from .widget import Widget +from .layouts import ( + GridLayout, + RowLayout, + ColumnLayout, +) +from .dashboard import Dashboard +from .dashboards_service import ( + CreateDashboardRequest, + ListDashboardsRequest, + ListDashboardsResponse, + GetDashboardRequest, + DeleteDashboardRequest, + UpdateDashboardRequest, +) + + +__all__ = ( + "Aggregation", + "PickTimeSeriesFilter", + "StatisticalTimeSeriesFilter", + "TimeSeriesQuery", + "TimeSeriesFilter", + "TimeSeriesFilterRatio", + "Threshold", + "Scorecard", + "Text", + "XyChart", + "ChartOptions", + "Widget", + "GridLayout", + "RowLayout", + "ColumnLayout", + "Dashboard", + "CreateDashboardRequest", + "ListDashboardsRequest", + "ListDashboardsResponse", + "GetDashboardRequest", + "DeleteDashboardRequest", + "UpdateDashboardRequest", +) diff --git a/google/monitoring/dashboard_v1/types/common.py b/google/monitoring/dashboard_v1/types/common.py new file mode 100644 index 0000000..b90f9c6 --- /dev/null +++ b/google/monitoring/dashboard_v1/types/common.py @@ -0,0 +1,270 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import proto # type: ignore + + +from google.protobuf import duration_pb2 as duration # type: ignore + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", + manifest={"Aggregation", "PickTimeSeriesFilter", "StatisticalTimeSeriesFilter",}, +) + + +class Aggregation(proto.Message): + r"""Describes how to combine multiple time series to provide a different + view of the data. Aggregation of time series is done in two steps. + First, each time series in the set is *aligned* to the same time + interval boundaries, then the set of time series is optionally + *reduced* in number. + + Alignment consists of applying the ``per_series_aligner`` operation + to each time series after its data has been divided into regular + ``alignment_period`` time intervals. This process takes *all* of the + data points in an alignment period, applies a mathematical + transformation such as averaging, minimum, maximum, delta, etc., and + converts them into a single data point per period. + + Reduction is when the aligned and transformed time series can + optionally be combined, reducing the number of time series through + similar mathematical transformations. Reduction involves applying a + ``cross_series_reducer`` to all the time series, optionally sorting + the time series into subsets with ``group_by_fields``, and applying + the reducer to each subset. + + The raw time series data can contain a huge amount of information + from multiple sources. Alignment and reduction transforms this mass + of data into a more manageable and representative collection of + data, for example "the 95% latency across the average of all tasks + in a cluster". This representative data can be more easily graphed + and comprehended, and the individual time series data is still + available for later drilldown. For more details, see `Filtering and + aggregation `__. + + Attributes: + alignment_period (~.duration.Duration): + The ``alignment_period`` specifies a time interval, in + seconds, that is used to divide the data in all the [time + series][google.monitoring.v3.TimeSeries] into consistent + blocks of time. This will be done before the per-series + aligner can be applied to the data. + + The value must be at least 60 seconds. If a per-series + aligner other than ``ALIGN_NONE`` is specified, this field + is required or an error is returned. If no per-series + aligner is specified, or the aligner ``ALIGN_NONE`` is + specified, then this field is ignored. + per_series_aligner (~.common.Aggregation.Aligner): + An ``Aligner`` describes how to bring the data points in a + single time series into temporal alignment. Except for + ``ALIGN_NONE``, all alignments cause all the data points in + an ``alignment_period`` to be mathematically grouped + together, resulting in a single data point for each + ``alignment_period`` with end timestamp at the end of the + period. + + Not all alignment operations may be applied to all time + series. The valid choices depend on the ``metric_kind`` and + ``value_type`` of the original time series. Alignment can + change the ``metric_kind`` or the ``value_type`` of the time + series. + + Time series data must be aligned in order to perform + cross-time series reduction. If ``cross_series_reducer`` is + specified, then ``per_series_aligner`` must be specified and + not equal to ``ALIGN_NONE`` and ``alignment_period`` must be + specified; otherwise, an error is returned. 
+ cross_series_reducer (~.common.Aggregation.Reducer): + The reduction operation to be used to combine time series + into a single time series, where the value of each data + point in the resulting series is a function of all the + already aligned values in the input time series. + + Not all reducer operations can be applied to all time + series. The valid choices depend on the ``metric_kind`` and + the ``value_type`` of the original time series. Reduction + can yield a time series with a different ``metric_kind`` or + ``value_type`` than the input time series. + + Time series data must first be aligned (see + ``per_series_aligner``) in order to perform cross-time + series reduction. If ``cross_series_reducer`` is specified, + then ``per_series_aligner`` must be specified, and must not + be ``ALIGN_NONE``. An ``alignment_period`` must also be + specified; otherwise, an error is returned. + group_by_fields (Sequence[str]): + The set of fields to preserve when ``cross_series_reducer`` + is specified. The ``group_by_fields`` determine how the time + series are partitioned into subsets prior to applying the + aggregation operation. Each subset contains time series that + have the same value for each of the grouping fields. Each + individual time series is a member of exactly one subset. + The ``cross_series_reducer`` is applied to each subset of + time series. It is not possible to reduce across different + resource types, so this field implicitly contains + ``resource.type``. Fields not specified in + ``group_by_fields`` are aggregated away. If + ``group_by_fields`` is not specified and all the time series + have the same resource type, then the time series are + aggregated into a single output time series. If + ``cross_series_reducer`` is not defined, this field is + ignored. + """ + + class Aligner(proto.Enum): + r"""The ``Aligner`` specifies the operation that will be applied to the + data points in each alignment period in a time series. Except for + ``ALIGN_NONE``, which specifies that no operation be applied, each + alignment operation replaces the set of data values in each + alignment period with a single value: the result of applying the + operation to the data values. An aligned time series has a single + data value at the end of each ``alignment_period``. + + An alignment operation can change the data type of the values, too. + For example, if you apply a counting operation to boolean values, + the data ``value_type`` in the original time series is ``BOOLEAN``, + but the ``value_type`` in the aligned result is ``INT64``. + """ + ALIGN_NONE = 0 + ALIGN_DELTA = 1 + ALIGN_RATE = 2 + ALIGN_INTERPOLATE = 3 + ALIGN_NEXT_OLDER = 4 + ALIGN_MIN = 10 + ALIGN_MAX = 11 + ALIGN_MEAN = 12 + ALIGN_COUNT = 13 + ALIGN_SUM = 14 + ALIGN_STDDEV = 15 + ALIGN_COUNT_TRUE = 16 + ALIGN_COUNT_FALSE = 24 + ALIGN_FRACTION_TRUE = 17 + ALIGN_PERCENTILE_99 = 18 + ALIGN_PERCENTILE_95 = 19 + ALIGN_PERCENTILE_50 = 20 + ALIGN_PERCENTILE_05 = 21 + ALIGN_PERCENT_CHANGE = 23 + + class Reducer(proto.Enum): + r"""A Reducer operation describes how to aggregate data points + from multiple time series into a single time series, where the + value of each data point in the resulting series is a function + of all the already aligned values in the input time series. 
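+
+        For example, ``REDUCE_MEAN`` replaces each group of aligned values that
+        share a timestamp with their mean, while ``REDUCE_PERCENTILE_95`` keeps
+        the 95th-percentile value of that group.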
+ """ + REDUCE_NONE = 0 + REDUCE_MEAN = 1 + REDUCE_MIN = 2 + REDUCE_MAX = 3 + REDUCE_SUM = 4 + REDUCE_STDDEV = 5 + REDUCE_COUNT = 6 + REDUCE_COUNT_TRUE = 7 + REDUCE_COUNT_FALSE = 15 + REDUCE_FRACTION_TRUE = 8 + REDUCE_PERCENTILE_99 = 9 + REDUCE_PERCENTILE_95 = 10 + REDUCE_PERCENTILE_50 = 11 + REDUCE_PERCENTILE_05 = 12 + + alignment_period = proto.Field(proto.MESSAGE, number=1, message=duration.Duration,) + + per_series_aligner = proto.Field(proto.ENUM, number=2, enum=Aligner,) + + cross_series_reducer = proto.Field(proto.ENUM, number=4, enum=Reducer,) + + group_by_fields = proto.RepeatedField(proto.STRING, number=5) + + +class PickTimeSeriesFilter(proto.Message): + r"""Describes a ranking-based time series filter. Each input time series + is ranked with an aligner. The filter will allow up to + ``num_time_series`` time series to pass through it, selecting them + based on the relative ranking. + + For example, if ``ranking_method`` is + ``METHOD_MEAN``,\ ``direction`` is ``BOTTOM``, and + ``num_time_series`` is 3, then the 3 times series with the lowest + mean values will pass through the filter. + + Attributes: + ranking_method (~.common.PickTimeSeriesFilter.Method): + ``ranking_method`` is applied to each time series + independently to produce the value which will be used to + compare the time series to other time series. + num_time_series (int): + How many time series to allow to pass through + the filter. + direction (~.common.PickTimeSeriesFilter.Direction): + How to use the ranking to select time series + that pass through the filter. + """ + + class Method(proto.Enum): + r"""The value reducers that can be applied to a + ``PickTimeSeriesFilter``. + """ + METHOD_UNSPECIFIED = 0 + METHOD_MEAN = 1 + METHOD_MAX = 2 + METHOD_MIN = 3 + METHOD_SUM = 4 + METHOD_LATEST = 5 + + class Direction(proto.Enum): + r"""Describes the ranking directions.""" + DIRECTION_UNSPECIFIED = 0 + TOP = 1 + BOTTOM = 2 + + ranking_method = proto.Field(proto.ENUM, number=1, enum=Method,) + + num_time_series = proto.Field(proto.INT32, number=2) + + direction = proto.Field(proto.ENUM, number=3, enum=Direction,) + + +class StatisticalTimeSeriesFilter(proto.Message): + r"""A filter that ranks streams based on their statistical + relation to other streams in a request. + Note: This field is deprecated and completely ignored by the + API. + + Attributes: + ranking_method (~.common.StatisticalTimeSeriesFilter.Method): + ``rankingMethod`` is applied to a set of time series, and + then the produced value for each individual time series is + used to compare a given time series to others. These are + methods that cannot be applied stream-by-stream, but rather + require the full context of a request to evaluate time + series. + num_time_series (int): + How many time series to output. 
+ """ + + class Method(proto.Enum): + r"""The filter methods that can be applied to a stream.""" + METHOD_UNSPECIFIED = 0 + METHOD_CLUSTER_OUTLIER = 1 + + ranking_method = proto.Field(proto.ENUM, number=1, enum=Method,) + + num_time_series = proto.Field(proto.INT32, number=2) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/monitoring/dashboard_v1/types/dashboard.py b/google/monitoring/dashboard_v1/types/dashboard.py new file mode 100644 index 0000000..2f7439d --- /dev/null +++ b/google/monitoring/dashboard_v1/types/dashboard.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.monitoring.dashboard_v1.types import layouts + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", manifest={"Dashboard",}, +) + + +class Dashboard(proto.Message): + r"""A Google Stackdriver dashboard. Dashboards define the content + and layout of pages in the Stackdriver web application. + + Attributes: + name (str): + Immutable. The resource name of the + dashboard. + display_name (str): + Required. The mutable, human-readable name. + etag (str): + ``etag`` is used for optimistic concurrency control as a way + to help prevent simultaneous updates of a policy from + overwriting each other. An ``etag`` is returned in the + response to ``GetDashboard``, and users are expected to put + that etag in the request to ``UpdateDashboard`` to ensure + that their change will be applied to the same version of the + Dashboard configuration. The field should not be passed + during dashboard creation. + grid_layout (~.layouts.GridLayout): + Content is arranged with a basic layout that + re-flows a simple list of informational elements + like widgets or tiles. + row_layout (~.layouts.RowLayout): + The content is divided into equally spaced + rows and the widgets are arranged horizontally. + column_layout (~.layouts.ColumnLayout): + The content is divided into equally spaced + columns and the widgets are arranged vertically. 
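+
+    Note that ``grid_layout``, ``row_layout``, and ``column_layout`` are
+    members of a ``layout`` oneof, so at most one of them may be set on a
+    given dashboard.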
+ """ + + name = proto.Field(proto.STRING, number=1) + + display_name = proto.Field(proto.STRING, number=2) + + etag = proto.Field(proto.STRING, number=4) + + grid_layout = proto.Field( + proto.MESSAGE, number=5, oneof="layout", message=layouts.GridLayout, + ) + + row_layout = proto.Field( + proto.MESSAGE, number=8, oneof="layout", message=layouts.RowLayout, + ) + + column_layout = proto.Field( + proto.MESSAGE, number=9, oneof="layout", message=layouts.ColumnLayout, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/monitoring/dashboard_v1/types/dashboards_service.py b/google/monitoring/dashboard_v1/types/dashboards_service.py new file mode 100644 index 0000000..1685c6e --- /dev/null +++ b/google/monitoring/dashboard_v1/types/dashboards_service.py @@ -0,0 +1,157 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.monitoring.dashboard_v1.types import dashboard as gmd_dashboard + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", + manifest={ + "CreateDashboardRequest", + "ListDashboardsRequest", + "ListDashboardsResponse", + "GetDashboardRequest", + "DeleteDashboardRequest", + "UpdateDashboardRequest", + }, +) + + +class CreateDashboardRequest(proto.Message): + r"""The ``CreateDashboard`` request. + + Attributes: + parent (str): + Required. The project on which to execute the request. The + format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + + The ``[PROJECT_ID_OR_NUMBER]`` must match the dashboard + resource name. + dashboard (~.gmd_dashboard.Dashboard): + Required. The initial dashboard + specification. + """ + + parent = proto.Field(proto.STRING, number=1) + + dashboard = proto.Field(proto.MESSAGE, number=2, message=gmd_dashboard.Dashboard,) + + +class ListDashboardsRequest(proto.Message): + r"""The ``ListDashboards`` request. + + Attributes: + parent (str): + Required. The scope of the dashboards to list. The format + is: + + :: + + projects/[PROJECT_ID_OR_NUMBER] + page_size (int): + A positive number that is the maximum number + of results to return. If unspecified, a default + of 1000 is used. + page_token (str): + If this field is not empty then it must contain the + ``nextPageToken`` value returned by a previous call to this + method. Using this field causes the method to return + additional results from the previous method call. + """ + + parent = proto.Field(proto.STRING, number=1) + + page_size = proto.Field(proto.INT32, number=2) + + page_token = proto.Field(proto.STRING, number=3) + + +class ListDashboardsResponse(proto.Message): + r"""The ``ListDashboards`` request. + + Attributes: + dashboards (Sequence[~.gmd_dashboard.Dashboard]): + The list of requested dashboards. + next_page_token (str): + If there are more results than have been returned, then this + field is set to a non-empty value. To see the additional + results, use that value as ``page_token`` in the next call + to this method. 
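+
+    The ``raw_page`` property defined below simply returns the response
+    itself; paging helpers use it to re-expose the raw response while
+    iterating with ``next_page_token``.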
+ """ + + @property + def raw_page(self): + return self + + dashboards = proto.RepeatedField( + proto.MESSAGE, number=1, message=gmd_dashboard.Dashboard, + ) + + next_page_token = proto.Field(proto.STRING, number=2) + + +class GetDashboardRequest(proto.Message): + r"""The ``GetDashboard`` request. + + Attributes: + name (str): + Required. The resource name of the Dashboard. The format is + one of: + + - ``dashboards/[DASHBOARD_ID]`` (for system dashboards) + - ``projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID]`` + (for custom dashboards). + """ + + name = proto.Field(proto.STRING, number=1) + + +class DeleteDashboardRequest(proto.Message): + r"""The ``DeleteDashboard`` request. + + Attributes: + name (str): + Required. The resource name of the Dashboard. The format is: + + :: + + projects/[PROJECT_ID_OR_NUMBER]/dashboards/[DASHBOARD_ID] + """ + + name = proto.Field(proto.STRING, number=1) + + +class UpdateDashboardRequest(proto.Message): + r"""The ``UpdateDashboard`` request. + + Attributes: + dashboard (~.gmd_dashboard.Dashboard): + Required. The dashboard that will replace the + existing dashboard. + """ + + dashboard = proto.Field(proto.MESSAGE, number=1, message=gmd_dashboard.Dashboard,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/monitoring/dashboard_v1/types/drilldowns.py b/google/monitoring/dashboard_v1/types/drilldowns.py new file mode 100644 index 0000000..7c1c138 --- /dev/null +++ b/google/monitoring/dashboard_v1/types/drilldowns.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +__protobuf__ = proto.module(package="google.monitoring.dashboard.v1", manifest={},) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/monitoring/dashboard_v1/types/layouts.py b/google/monitoring/dashboard_v1/types/layouts.py new file mode 100644 index 0000000..5a12f9a --- /dev/null +++ b/google/monitoring/dashboard_v1/types/layouts.py @@ -0,0 +1,115 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.monitoring.dashboard_v1.types import widget + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", + manifest={"GridLayout", "RowLayout", "ColumnLayout",}, +) + + +class GridLayout(proto.Message): + r"""A basic layout divides the available space into vertical + columns of equal width and arranges a list of widgets using a + row-first strategy. 
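+
+    For example, with ``columns`` set to 3 and seven widgets supplied, the
+    widgets fill the first two rows completely and the third row holds the
+    remaining widget.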
+ + Attributes: + columns (int): + The number of columns into which the view's + width is divided. If omitted or set to zero, a + system default will be used while rendering. + widgets (Sequence[~.widget.Widget]): + The informational elements that are arranged + into the columns row-first. + """ + + columns = proto.Field(proto.INT64, number=1) + + widgets = proto.RepeatedField(proto.MESSAGE, number=2, message=widget.Widget,) + + +class RowLayout(proto.Message): + r"""A simplified layout that divides the available space into + rows and arranges a set of widgets horizontally in each row. + + Attributes: + rows (Sequence[~.layouts.RowLayout.Row]): + The rows of content to display. + """ + + class Row(proto.Message): + r"""Defines the layout properties and content for a row. + + Attributes: + weight (int): + The relative weight of this row. The row + weight is used to adjust the height of rows on + the screen (relative to peers). Greater the + weight, greater the height of the row on the + screen. If omitted, a value of 1 is used while + rendering. + widgets (Sequence[~.widget.Widget]): + The display widgets arranged horizontally in + this row. + """ + + weight = proto.Field(proto.INT64, number=1) + + widgets = proto.RepeatedField(proto.MESSAGE, number=2, message=widget.Widget,) + + rows = proto.RepeatedField(proto.MESSAGE, number=1, message=Row,) + + +class ColumnLayout(proto.Message): + r"""A simplified layout that divides the available space into + vertical columns and arranges a set of widgets vertically in + each column. + + Attributes: + columns (Sequence[~.layouts.ColumnLayout.Column]): + The columns of content to display. + """ + + class Column(proto.Message): + r"""Defines the layout properties and content for a column. + + Attributes: + weight (int): + The relative weight of this column. The + column weight is used to adjust the width of + columns on the screen (relative to peers). + Greater the weight, greater the width of the + column on the screen. If omitted, a value of 1 + is used while rendering. + widgets (Sequence[~.widget.Widget]): + The display widgets arranged vertically in + this column. + """ + + weight = proto.Field(proto.INT64, number=1) + + widgets = proto.RepeatedField(proto.MESSAGE, number=2, message=widget.Widget,) + + columns = proto.RepeatedField(proto.MESSAGE, number=1, message=Column,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/monitoring/dashboard_v1/types/metrics.py b/google/monitoring/dashboard_v1/types/metrics.py new file mode 100644 index 0000000..a97a9ad --- /dev/null +++ b/google/monitoring/dashboard_v1/types/metrics.py @@ -0,0 +1,236 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +import proto # type: ignore + + +from google.monitoring.dashboard_v1.types import common + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", + manifest={ + "SparkChartType", + "TimeSeriesQuery", + "TimeSeriesFilter", + "TimeSeriesFilterRatio", + "Threshold", + }, +) + + +class SparkChartType(proto.Enum): + r"""Defines the possible types of spark chart supported by the + ``Scorecard``. + """ + SPARK_CHART_TYPE_UNSPECIFIED = 0 + SPARK_LINE = 1 + SPARK_BAR = 2 + + +class TimeSeriesQuery(proto.Message): + r"""TimeSeriesQuery collects the set of supported methods for + querying time series data from the Stackdriver metrics API. + + Attributes: + time_series_filter (~.metrics.TimeSeriesFilter): + Filter parameters to fetch time series. + time_series_filter_ratio (~.metrics.TimeSeriesFilterRatio): + Parameters to fetch a ratio between two time + series filters. + time_series_query_language (str): + A query used to fetch time series. + unit_override (str): + The unit of data contained in fetched time series. If + non-empty, this unit will override any unit that accompanies + fetched data. The format is the same as the + ```unit`` `__ + field in ``MetricDescriptor``. + """ + + time_series_filter = proto.Field( + proto.MESSAGE, number=1, oneof="source", message="TimeSeriesFilter", + ) + + time_series_filter_ratio = proto.Field( + proto.MESSAGE, number=2, oneof="source", message="TimeSeriesFilterRatio", + ) + + time_series_query_language = proto.Field(proto.STRING, number=3, oneof="source") + + unit_override = proto.Field(proto.STRING, number=5) + + +class TimeSeriesFilter(proto.Message): + r"""A filter that defines a subset of time series data that is displayed + in a widget. Time series data is fetched using the + ```ListTimeSeries`` `__ + method. + + Attributes: + filter (str): + Required. The `monitoring + filter `__ + that identifies the metric types, resources, and projects to + query. + aggregation (~.common.Aggregation): + By default, the raw time series data is + returned. Use this field to combine multiple + time series for different views of the data. + secondary_aggregation (~.common.Aggregation): + Apply a second aggregation after ``aggregation`` is applied. + pick_time_series_filter (~.common.PickTimeSeriesFilter): + Ranking based time series filter. + statistical_time_series_filter (~.common.StatisticalTimeSeriesFilter): + Statistics based time series filter. + Note: This field is deprecated and completely + ignored by the API. + """ + + filter = proto.Field(proto.STRING, number=1) + + aggregation = proto.Field(proto.MESSAGE, number=2, message=common.Aggregation,) + + secondary_aggregation = proto.Field( + proto.MESSAGE, number=3, message=common.Aggregation, + ) + + pick_time_series_filter = proto.Field( + proto.MESSAGE, + number=4, + oneof="output_filter", + message=common.PickTimeSeriesFilter, + ) + + statistical_time_series_filter = proto.Field( + proto.MESSAGE, + number=5, + oneof="output_filter", + message=common.StatisticalTimeSeriesFilter, + ) + + +class TimeSeriesFilterRatio(proto.Message): + r"""A pair of time series filters that define a ratio + computation. The output time series is the pair-wise division of + each aligned element from the numerator and denominator time + series. + + Attributes: + numerator (~.metrics.TimeSeriesFilterRatio.RatioPart): + The numerator of the ratio. + denominator (~.metrics.TimeSeriesFilterRatio.RatioPart): + The denominator of the ratio. 
+ secondary_aggregation (~.common.Aggregation): + Apply a second aggregation after the ratio is + computed. + pick_time_series_filter (~.common.PickTimeSeriesFilter): + Ranking based time series filter. + statistical_time_series_filter (~.common.StatisticalTimeSeriesFilter): + Statistics based time series filter. + Note: This field is deprecated and completely + ignored by the API. + """ + + class RatioPart(proto.Message): + r"""Describes a query to build the numerator or denominator of a + TimeSeriesFilterRatio. + + Attributes: + filter (str): + Required. The `monitoring + filter `__ + that identifies the metric types, resources, and projects to + query. + aggregation (~.common.Aggregation): + By default, the raw time series data is + returned. Use this field to combine multiple + time series for different views of the data. + """ + + filter = proto.Field(proto.STRING, number=1) + + aggregation = proto.Field(proto.MESSAGE, number=2, message=common.Aggregation,) + + numerator = proto.Field(proto.MESSAGE, number=1, message=RatioPart,) + + denominator = proto.Field(proto.MESSAGE, number=2, message=RatioPart,) + + secondary_aggregation = proto.Field( + proto.MESSAGE, number=3, message=common.Aggregation, + ) + + pick_time_series_filter = proto.Field( + proto.MESSAGE, + number=4, + oneof="output_filter", + message=common.PickTimeSeriesFilter, + ) + + statistical_time_series_filter = proto.Field( + proto.MESSAGE, + number=5, + oneof="output_filter", + message=common.StatisticalTimeSeriesFilter, + ) + + +class Threshold(proto.Message): + r"""Defines a threshold for categorizing time series values. + + Attributes: + label (str): + A label for the threshold. + value (float): + The value of the threshold. The value should + be defined in the native scale of the metric. + color (~.metrics.Threshold.Color): + The state color for this threshold. Color is + not allowed in a XyChart. + direction (~.metrics.Threshold.Direction): + The direction for the current threshold. + Direction is not allowed in a XyChart. + """ + + class Color(proto.Enum): + r"""The color suggests an interpretation to the viewer when + actual values cross the threshold. Comments on each color + provide UX guidance on how users can be expected to interpret a + given state color. + """ + COLOR_UNSPECIFIED = 0 + YELLOW = 4 + RED = 6 + + class Direction(proto.Enum): + r"""Whether the threshold is considered crossed by an actual + value above or below its threshold value. + """ + DIRECTION_UNSPECIFIED = 0 + ABOVE = 1 + BELOW = 2 + + label = proto.Field(proto.STRING, number=1) + + value = proto.Field(proto.DOUBLE, number=2) + + color = proto.Field(proto.ENUM, number=3, enum=Color,) + + direction = proto.Field(proto.ENUM, number=4, enum=Direction,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/monitoring/dashboard_v1/types/scorecard.py b/google/monitoring/dashboard_v1/types/scorecard.py new file mode 100644 index 0000000..dc4dd7d --- /dev/null +++ b/google/monitoring/dashboard_v1/types/scorecard.py @@ -0,0 +1,152 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.monitoring.dashboard_v1.types import metrics +from google.protobuf import duration_pb2 as duration # type: ignore + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", manifest={"Scorecard",}, +) + + +class Scorecard(proto.Message): + r"""A widget showing the latest value of a metric, and how this + value relates to one or more thresholds. + + Attributes: + time_series_query (~.metrics.TimeSeriesQuery): + Required. Fields for querying time series + data from the Stackdriver metrics API. + gauge_view (~.scorecard.Scorecard.GaugeView): + Will cause the scorecard to show a gauge + chart. + spark_chart_view (~.scorecard.Scorecard.SparkChartView): + Will cause the scorecard to show a spark + chart. + thresholds (Sequence[~.metrics.Threshold]): + The thresholds used to determine the state of + the scorecard given the time series' current + value. For an actual value x, the scorecard is + in a danger state if x is less than or equal to + a danger threshold that triggers below, or + greater than or equal to a danger threshold that + triggers above. Similarly, if x is above/below a + warning threshold that triggers above/below, + then the scorecard is in a warning state - + unless x also puts it in a danger state. (Danger + trumps warning.) + As an example, consider a scorecard with the + following four thresholds: { + value: 90, + category: 'DANGER', + trigger: 'ABOVE', + }, + { + value: 70, + category: 'WARNING', + trigger: 'ABOVE', + }, + { + value: 10, + category: 'DANGER', + trigger: 'BELOW', + }, + { + value: 20, + category: 'WARNING', + trigger: 'BELOW', + } + + Then: values less than or equal to 10 would put + the scorecard in a DANGER state, values greater + than 10 but less than or equal to 20 a WARNING + state, values strictly between 20 and 70 an OK + state, values greater than or equal to 70 but + less than 90 a WARNING state, and values greater + than or equal to 90 a DANGER state. + """ + + class GaugeView(proto.Message): + r"""A gauge chart shows where the current value sits within a + pre-defined range. The upper and lower bounds should define the + possible range of values for the scorecard's query (inclusive). + + Attributes: + lower_bound (float): + The lower bound for this gauge chart. The + value of the chart should always be greater than + or equal to this. + upper_bound (float): + The upper bound for this gauge chart. The + value of the chart should always be less than or + equal to this. + """ + + lower_bound = proto.Field(proto.DOUBLE, number=1) + + upper_bound = proto.Field(proto.DOUBLE, number=2) + + class SparkChartView(proto.Message): + r"""A sparkChart is a small chart suitable for inclusion in a + table-cell or inline in text. This message contains the + configuration for a sparkChart to show up on a Scorecard, + showing recent trends of the scorecard's timeseries. + + Attributes: + spark_chart_type (~.metrics.SparkChartType): + Required. The type of sparkchart to show in + this chartView. + min_alignment_period (~.duration.Duration): + The lower bound on data point frequency in + the chart implemented by specifying the minimum + alignment period to use in a time series query. + For example, if the data is published once every + 10 minutes it would not make sense to fetch and + align data at one minute intervals. This field + is optional and exists only as a hint. 
+ """ + + spark_chart_type = proto.Field( + proto.ENUM, number=1, enum=metrics.SparkChartType, + ) + + min_alignment_period = proto.Field( + proto.MESSAGE, number=2, message=duration.Duration, + ) + + time_series_query = proto.Field( + proto.MESSAGE, number=1, message=metrics.TimeSeriesQuery, + ) + + gauge_view = proto.Field( + proto.MESSAGE, number=4, oneof="data_view", message=GaugeView, + ) + + spark_chart_view = proto.Field( + proto.MESSAGE, number=5, oneof="data_view", message=SparkChartView, + ) + + thresholds = proto.RepeatedField( + proto.MESSAGE, number=6, message=metrics.Threshold, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/monitoring/dashboard_v1/types/service.py b/google/monitoring/dashboard_v1/types/service.py new file mode 100644 index 0000000..7c1c138 --- /dev/null +++ b/google/monitoring/dashboard_v1/types/service.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +__protobuf__ = proto.module(package="google.monitoring.dashboard.v1", manifest={},) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/monitoring/dashboard_v1/types/text.py b/google/monitoring/dashboard_v1/types/text.py new file mode 100644 index 0000000..df10f69 --- /dev/null +++ b/google/monitoring/dashboard_v1/types/text.py @@ -0,0 +1,47 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", manifest={"Text",}, +) + + +class Text(proto.Message): + r"""A widget that displays textual content. + + Attributes: + content (str): + The text content to be displayed. + format (~.text.Text.Format): + How the text content is formatted. 
+ """ + + class Format(proto.Enum): + r"""The format type of the text content.""" + FORMAT_UNSPECIFIED = 0 + MARKDOWN = 1 + RAW = 2 + + content = proto.Field(proto.STRING, number=1) + + format = proto.Field(proto.ENUM, number=2, enum=Format,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/monitoring/dashboard_v1/types/widget.py b/google/monitoring/dashboard_v1/types/widget.py new file mode 100644 index 0000000..d8707fc --- /dev/null +++ b/google/monitoring/dashboard_v1/types/widget.py @@ -0,0 +1,65 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import proto # type: ignore + + +from google.monitoring.dashboard_v1.types import scorecard as gmd_scorecard +from google.monitoring.dashboard_v1.types import text as gmd_text +from google.monitoring.dashboard_v1.types import xychart +from google.protobuf import empty_pb2 as empty # type: ignore + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", manifest={"Widget",}, +) + + +class Widget(proto.Message): + r"""Widget contains a single dashboard component and + configuration of how to present the component in the dashboard. + + Attributes: + title (str): + Optional. The title of the widget. + xy_chart (~.xychart.XyChart): + A chart of time series data. + scorecard (~.gmd_scorecard.Scorecard): + A scorecard summarizing time series data. + text (~.gmd_text.Text): + A raw string or markdown displaying textual + content. + blank (~.empty.Empty): + A blank space. + """ + + title = proto.Field(proto.STRING, number=1) + + xy_chart = proto.Field( + proto.MESSAGE, number=2, oneof="content", message=xychart.XyChart, + ) + + scorecard = proto.Field( + proto.MESSAGE, number=3, oneof="content", message=gmd_scorecard.Scorecard, + ) + + text = proto.Field(proto.MESSAGE, number=4, oneof="content", message=gmd_text.Text,) + + blank = proto.Field(proto.MESSAGE, number=5, oneof="content", message=empty.Empty,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/monitoring/dashboard_v1/types/xychart.py b/google/monitoring/dashboard_v1/types/xychart.py new file mode 100644 index 0000000..d8c284a --- /dev/null +++ b/google/monitoring/dashboard_v1/types/xychart.py @@ -0,0 +1,155 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
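The `Scorecard` docstring above describes how thresholds drive the warning and danger states, and `Widget` carries exactly one `content` member. The sketch below ties the two together; it is illustrative only (the filter string, bounds, and names are placeholders) and assumes the post-synth `google.cloud.monitoring_dashboard_v1.types` import path used by the unit tests later in this change, plus the `TimeSeriesQuery`/`TimeSeriesFilter` fields defined earlier in `metrics.py`.

```py
from google.cloud.monitoring_dashboard_v1.types import metrics, scorecard, widget

# A scorecard rendered as a 0-100 gauge that enters a danger (red) state
# once the latest value reaches 90 or above.
card = scorecard.Scorecard(
    time_series_query=metrics.TimeSeriesQuery(
        time_series_filter=metrics.TimeSeriesFilter(
            filter='metric.type="example.com/latency"',  # placeholder metric type
        ),
    ),
    gauge_view=scorecard.Scorecard.GaugeView(lower_bound=0.0, upper_bound=100.0),
    thresholds=[
        metrics.Threshold(
            label="danger",
            value=90.0,
            color=metrics.Threshold.Color.RED,
            direction=metrics.Threshold.Direction.ABOVE,
        ),
    ],
)

# Wrap the scorecard in a Widget; `scorecard` is one arm of the `content` oneof.
panel = widget.Widget(title="Latency", scorecard=card)
```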
+# + +import proto # type: ignore + + +from google.monitoring.dashboard_v1.types import metrics +from google.protobuf import duration_pb2 as duration # type: ignore + + +__protobuf__ = proto.module( + package="google.monitoring.dashboard.v1", manifest={"XyChart", "ChartOptions",}, +) + + +class XyChart(proto.Message): + r"""A chart that displays data on a 2D (X and Y axes) plane. + + Attributes: + data_sets (Sequence[~.xychart.XyChart.DataSet]): + Required. The data displayed in this chart. + timeshift_duration (~.duration.Duration): + The duration used to display a comparison + chart. A comparison chart simultaneously shows + values from two similar-length time periods + (e.g., week-over-week metrics). + The duration must be positive, and it can only + be applied to charts with data sets of LINE plot + type. + thresholds (Sequence[~.metrics.Threshold]): + Threshold lines drawn horizontally across the + chart. + x_axis (~.xychart.XyChart.Axis): + The properties applied to the X axis. + y_axis (~.xychart.XyChart.Axis): + The properties applied to the Y axis. + chart_options (~.xychart.ChartOptions): + Display options for the chart. + """ + + class DataSet(proto.Message): + r"""Groups a time series query definition with charting options. + + Attributes: + time_series_query (~.metrics.TimeSeriesQuery): + Required. Fields for querying time series + data from the Stackdriver metrics API. + plot_type (~.xychart.XyChart.DataSet.PlotType): + How this data should be plotted on the chart. + legend_template (str): + A template string for naming ``TimeSeries`` in the resulting + data set. This should be a string with interpolations of the + form ``${label_name}``, which will resolve to the label's + value. + min_alignment_period (~.duration.Duration): + Optional. The lower bound on data point frequency for this + data set, implemented by specifying the minimum alignment + period to use in a time series query For example, if the + data is published once every 10 minutes, the + ``min_alignment_period`` should be at least 10 minutes. It + would not make sense to fetch and align data at one minute + intervals. + """ + + class PlotType(proto.Enum): + r"""The types of plotting strategies for data sets.""" + PLOT_TYPE_UNSPECIFIED = 0 + LINE = 1 + STACKED_AREA = 2 + STACKED_BAR = 3 + HEATMAP = 4 + + time_series_query = proto.Field( + proto.MESSAGE, number=1, message=metrics.TimeSeriesQuery, + ) + + plot_type = proto.Field(proto.ENUM, number=2, enum="XyChart.DataSet.PlotType",) + + legend_template = proto.Field(proto.STRING, number=3) + + min_alignment_period = proto.Field( + proto.MESSAGE, number=4, message=duration.Duration, + ) + + class Axis(proto.Message): + r"""A chart axis. + + Attributes: + label (str): + The label of the axis. + scale (~.xychart.XyChart.Axis.Scale): + The axis scale. By default, a linear scale is + used. 
+ """ + + class Scale(proto.Enum): + r"""Types of scales used in axes.""" + SCALE_UNSPECIFIED = 0 + LINEAR = 1 + LOG10 = 2 + + label = proto.Field(proto.STRING, number=1) + + scale = proto.Field(proto.ENUM, number=2, enum="XyChart.Axis.Scale",) + + data_sets = proto.RepeatedField(proto.MESSAGE, number=1, message=DataSet,) + + timeshift_duration = proto.Field( + proto.MESSAGE, number=4, message=duration.Duration, + ) + + thresholds = proto.RepeatedField( + proto.MESSAGE, number=5, message=metrics.Threshold, + ) + + x_axis = proto.Field(proto.MESSAGE, number=6, message=Axis,) + + y_axis = proto.Field(proto.MESSAGE, number=7, message=Axis,) + + chart_options = proto.Field(proto.MESSAGE, number=8, message="ChartOptions",) + + +class ChartOptions(proto.Message): + r"""Options to control visual rendering of a chart. + + Attributes: + mode (~.xychart.ChartOptions.Mode): + The chart mode. + """ + + class Mode(proto.Enum): + r"""Chart mode options.""" + MODE_UNSPECIFIED = 0 + COLOR = 1 + X_RAY = 2 + STATS = 3 + + mode = proto.Field(proto.ENUM, number=1, enum=Mode,) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/mypy.ini b/mypy.ini new file mode 100644 index 0000000..4505b48 --- /dev/null +++ b/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/noxfile.py b/noxfile.py index 248fcd3..aa9e3da 100644 --- a/noxfile.py +++ b/noxfile.py @@ -27,8 +27,8 @@ BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["2.7", "3.8"] -UNIT_TEST_PYTHON_VERSIONS = ["2.7", "3.5", "3.6", "3.7", "3.8"] +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"] @nox.session(python=DEFAULT_PYTHON_VERSION) @@ -70,6 +70,8 @@ def lint_setup_py(session): def default(session): # Install all test dependencies, then install this package in-place. + session.install("asyncmock", "pytest-asyncio") + session.install("mock", "pytest", "pytest-cov") session.install("-e", ".") @@ -135,7 +137,7 @@ def cover(session): test runs (not system test runs), and then erases coverage data. 
""" session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=79") + session.run("coverage", "report", "--show-missing", "--fail-under=100") session.run("coverage", "erase") @@ -150,7 +152,7 @@ def docs(session): shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) session.run( "sphinx-build", - "-W", # warnings as errors + # "-W", # warnings as errors "-T", # show full traceback on exception "-N", # no colors "-b", diff --git a/samples/AUTHORING_GUIDE.md b/samples/AUTHORING_GUIDE.md new file mode 100644 index 0000000..55c97b3 --- /dev/null +++ b/samples/AUTHORING_GUIDE.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/samples/CONTRIBUTING.md b/samples/CONTRIBUTING.md new file mode 100644 index 0000000..34c882b --- /dev/null +++ b/samples/CONTRIBUTING.md @@ -0,0 +1 @@ +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file diff --git a/scripts/fixup_dashboard_v1_keywords.py b/scripts/fixup_dashboard_v1_keywords.py new file mode 100644 index 0000000..e0d2372 --- /dev/null +++ b/scripts/fixup_dashboard_v1_keywords.py @@ -0,0 +1,182 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class dashboardCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_dashboard': ('parent', 'dashboard', ), + 'delete_dashboard': ('name', ), + 'get_dashboard': ('name', ), + 'list_dashboards': ('parent', 'page_size', 'page_token', ), + 'update_dashboard': ('dashboard', ), + + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. 
+ return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), + cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=dashboardCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the dashboard client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. 
+""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/setup.py b/setup.py index cab7753..fa0a055 100644 --- a/setup.py +++ b/setup.py @@ -25,8 +25,9 @@ version = "1.0.0" release_status = "Development Status :: 5 - Production/Stable" dependencies = [ - "google-api-core[grpc] >= 1.14.0, < 2.0.0dev", - 'enum34; python_version < "3.4"', + "google-api-core[grpc] >= 1.22.0, < 2.0.0dev", + "proto-plus >= 0.4.0", + "libcst >= 0.2.5", ] package_root = os.path.abspath(os.path.dirname(__file__)) @@ -36,7 +37,9 @@ readme = readme_file.read() packages = [ - package for package in setuptools.find_packages() if package.startswith("google") + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") ] namespaces = ["google"] @@ -58,9 +61,9 @@ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", "Operating System :: OS Independent", "Topic :: Internet", ], @@ -68,7 +71,8 @@ packages=packages, namespace_packages=namespaces, install_requires=dependencies, - python_requires=">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*", + python_requires=">=3.6", + scripts=["scripts/fixup_dashboard_v1_keywords.py"], include_package_data=True, zip_safe=False, ) diff --git a/synth.metadata b/synth.metadata index 597394c..5914933 100644 --- a/synth.metadata +++ b/synth.metadata @@ -3,30 +3,30 @@ { "git": { "name": ".", - "remote": "https://github.com/googleapis/python-monitoring-dashboards.git", - "sha": "8ed9094df80db87caa9852279be76d69783dc9c3" + "remote": "git@github.com:googleapis/python-monitoring-dashboards.git", + "sha": "bee992e45a3500dfe44089e7e594a906fd675c8d" } }, { "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "184661793fbe3b89f2b485c303e7466cef9d21a1", - "internalRef": "316182409" + "sha": "d399c754bdea83297877ab49e5f66b257a957a78", + "internalRef": "323860336" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" + "sha": "ee7506d15daa3873accfff9430eff7e3953f0248" } }, { "git": { "name": "synthtool", "remote": "https://github.com/googleapis/synthtool.git", - "sha": "303271797a360f8a439203413f13a160f2f5b3b4" + "sha": "ee7506d15daa3873accfff9430eff7e3953f0248" } } ], diff --git a/synth.py b/synth.py index d452233..bf7e99f 100644 --- a/synth.py +++ b/synth.py @@ -28,57 +28,54 @@ version="v1", 
bazel_target="//google/monitoring/dashboard/v1:monitoring-dashboard-v1-py", include_protos=True, - proto_output_path="google/cloud/monitoring_dashboard/v1/proto", ) +s.move(library / "google/cloud/monitoring_dashboard_v1/proto") +s.move(library / "google/monitoring/dashboard", "google/cloud/monitoring_dashboard") s.move( - library, - excludes=[ - "google/cloud/monitoring_dashboard_v1/proto", # Protos (pb2s) are copied to the incorrect location - "nox.py", - "README.rst", - "setup.py", - "docs/index.rst", - ], + library / "google/monitoring/dashboard_v1", + "google/cloud/monitoring_dashboard_v1" ) +s.move(library / "tests") +s.move(library / "scripts") +s.move(library / "docs", excludes=[library / "docs/index.rst"]) -s.move( - library / "google/cloud/monitoring_dashboard_v1/proto", - "google/cloud/monitoring_dashboard/v1/proto", +# Fix namespace +s.replace( + "google/cloud/**/*.py", + "google.monitoring.dashboard_v1", + "google.cloud.monitoring_dashboard_v1", ) - -# correct license headers -python.fix_pb2_headers() -python.fix_pb2_grpc_headers() - -# Fix imports s.replace( - "google/cloud/**/proto/*_pb2*.py", - "google\.cloud\.monitoring\_dashboard\_v1\.proto", - "google.cloud.monitoring_dashboard.v1.proto", + "tests/unit/gapic/**/*.py", + "google.monitoring.dashboard_v1", + "google.cloud.monitoring_dashboard_v1", ) - -# Fix docstring with trailing backticks s.replace( - "google/cloud/**/dashboards_service_pb2.py", - """ Required\. The resource name of the Dashboard\. The format is `` - "projects/\{project\_id\_or\_number\}/dashboards/\{dashboard\_id\}"``""", - """ Required. The resource name of the Dashboard. The format is - ``"projects/{project_id_or_number}/dashboards/{dashboard_id}"``""", + "docs/**/*.rst", + "google.monitoring.dashboard_v1", + "google.cloud.monitoring_dashboard_v1", ) # Keep cloud in package names for consistency s.replace( "google/**/*.py", "google-cloud-monitoring-dashboard", "google-cloud-monitoring-dashboards" ) + # ---------------------------------------------------------------------------- # Add templated files # ---------------------------------------------------------------------------- -templated_files = common.py_library(cov_level=79) -s.move(templated_files) +templated_files = common.py_library( + samples=False, # set to True only if there are samples + microgenerator=True, +) +s.move(templated_files, excludes=[".coveragerc"]) # microgenerator has a good .coveragerc file # TODO(busunkim): Use latest sphinx after microgenerator transition s.replace("noxfile.py", """['"]sphinx['"]""", '"sphinx<3.0.0"') +# Temporarily disable warnings due to +# https://github.com/googleapis/gapic-generator-python/issues/525 +s.replace("noxfile.py", '[\"\']-W[\"\']', '# "-W"') -s.shell.run(["nox", "-s", "blacken"], hide_output=False) +s.shell.run(["nox", "-s", "blacken"], hide_output=False) \ No newline at end of file diff --git a/tests/unit/gapic/dashboard_v1/__init__.py b/tests/unit/gapic/dashboard_v1/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/unit/gapic/dashboard_v1/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/gapic/dashboard_v1/test_dashboards_service.py b/tests/unit/gapic/dashboard_v1/test_dashboards_service.py new file mode 100644 index 0000000..5a7e1a2 --- /dev/null +++ b/tests/unit/gapic/dashboard_v1/test_dashboards_service.py @@ -0,0 +1,1542 @@ +# -*- coding: utf-8 -*- + +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +import os +import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + +from google import auth +from google.api_core import client_options +from google.api_core import exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.monitoring_dashboard_v1.services.dashboards_service import ( + DashboardsServiceAsyncClient, +) +from google.cloud.monitoring_dashboard_v1.services.dashboards_service import ( + DashboardsServiceClient, +) +from google.cloud.monitoring_dashboard_v1.services.dashboards_service import pagers +from google.cloud.monitoring_dashboard_v1.services.dashboards_service import transports +from google.cloud.monitoring_dashboard_v1.types import common +from google.cloud.monitoring_dashboard_v1.types import dashboard +from google.cloud.monitoring_dashboard_v1.types import dashboards_service +from google.cloud.monitoring_dashboard_v1.types import layouts +from google.cloud.monitoring_dashboard_v1.types import metrics +from google.cloud.monitoring_dashboard_v1.types import scorecard +from google.cloud.monitoring_dashboard_v1.types import scorecard as gmd_scorecard +from google.cloud.monitoring_dashboard_v1.types import text +from google.cloud.monitoring_dashboard_v1.types import text as gmd_text +from google.cloud.monitoring_dashboard_v1.types import widget +from google.cloud.monitoring_dashboard_v1.types import xychart +from google.oauth2 import service_account +from google.protobuf import duration_pb2 as duration # type: ignore + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert DashboardsServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DashboardsServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DashboardsServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DashboardsServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DashboardsServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DashboardsServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + + +@pytest.mark.parametrize( + "client_class", [DashboardsServiceClient, DashboardsServiceAsyncClient] +) +def test_dashboards_service_client_from_service_account_file(client_class): + creds = credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client._transport._credentials == creds + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client._transport._credentials == creds + + assert client._transport._host == "monitoring.googleapis.com:443" + + +def test_dashboards_service_client_get_transport_class(): + transport = DashboardsServiceClient.get_transport_class() + assert transport == transports.DashboardsServiceGrpcTransport + + transport = DashboardsServiceClient.get_transport_class("grpc") + assert transport == transports.DashboardsServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DashboardsServiceClient, transports.DashboardsServiceGrpcTransport, "grpc"), + ( + DashboardsServiceAsyncClient, + transports.DashboardsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +@mock.patch.object( + DashboardsServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DashboardsServiceClient), +) +@mock.patch.object( + DashboardsServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DashboardsServiceAsyncClient), +) +def test_dashboards_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DashboardsServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DashboardsServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=client_cert_source_callback, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", and default_client_cert_source is provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is + # "auto", but client_cert_source and default_client_cert_source are None. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DashboardsServiceClient, transports.DashboardsServiceGrpcTransport, "grpc"), + ( + DashboardsServiceAsyncClient, + transports.DashboardsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_dashboards_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions(scopes=["1", "2"],) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DashboardsServiceClient, transports.DashboardsServiceGrpcTransport, "grpc"), + ( + DashboardsServiceAsyncClient, + transports.DashboardsServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + ], +) +def test_dashboards_service_client_client_options_credentials_file( + client_class, transport_class, transport_name +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + api_mtls_endpoint=client.DEFAULT_ENDPOINT, + client_cert_source=None, + quota_project_id=None, + ) + + +def test_dashboards_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.monitoring_dashboard_v1.services.dashboards_service.transports.DashboardsServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DashboardsServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + api_mtls_endpoint="squid.clam.whelk", + client_cert_source=None, + quota_project_id=None, + ) + + +def test_create_dashboard(transport: str = "grpc"): + client = DashboardsServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dashboards_service.CreateDashboardRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_dashboard), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dashboard.Dashboard( + name="name_value", + display_name="display_name_value", + etag="etag_value", + grid_layout=layouts.GridLayout(columns=769), + ) + + response = client.create_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dashboard.Dashboard) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_create_dashboard_async(transport: str = "grpc_asyncio"): + client = DashboardsServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dashboards_service.CreateDashboardRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_dashboard), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dashboard.Dashboard( + name="name_value", display_name="display_name_value", etag="etag_value", + ) + ) + + response = await client.create_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, dashboard.Dashboard) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.etag == "etag_value" + + +def test_create_dashboard_field_headers(): + client = DashboardsServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dashboards_service.CreateDashboardRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.create_dashboard), "__call__" + ) as call: + call.return_value = dashboard.Dashboard() + + client.create_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_dashboard_field_headers_async(): + client = DashboardsServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dashboards_service.CreateDashboardRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.create_dashboard), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dashboard.Dashboard()) + + await client.create_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_dashboards(transport: str = "grpc"): + client = DashboardsServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dashboards_service.ListDashboardsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_dashboards), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dashboards_service.ListDashboardsResponse( + next_page_token="next_page_token_value", + ) + + response = client.list_dashboards(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDashboardsPager) + + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_dashboards_async(transport: str = "grpc_asyncio"): + client = DashboardsServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = dashboards_service.ListDashboardsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_dashboards), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dashboards_service.ListDashboardsResponse( + next_page_token="next_page_token_value", + ) + ) + + response = await client.list_dashboards(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDashboardsAsyncPager) + + assert response.next_page_token == "next_page_token_value" + + +def test_list_dashboards_field_headers(): + client = DashboardsServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dashboards_service.ListDashboardsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_dashboards), "__call__") as call: + call.return_value = dashboards_service.ListDashboardsResponse() + + client.list_dashboards(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_dashboards_field_headers_async(): + client = DashboardsServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dashboards_service.ListDashboardsRequest() + request.parent = "parent/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_dashboards), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dashboards_service.ListDashboardsResponse() + ) + + await client.list_dashboards(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + + +def test_list_dashboards_pager(): + client = DashboardsServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_dashboards), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dashboards_service.ListDashboardsResponse( + dashboards=[ + dashboard.Dashboard(), + dashboard.Dashboard(), + dashboard.Dashboard(), + ], + next_page_token="abc", + ), + dashboards_service.ListDashboardsResponse( + dashboards=[], next_page_token="def", + ), + dashboards_service.ListDashboardsResponse( + dashboards=[dashboard.Dashboard(),], next_page_token="ghi", + ), + dashboards_service.ListDashboardsResponse( + dashboards=[dashboard.Dashboard(), dashboard.Dashboard(),], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_dashboards(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, dashboard.Dashboard) for i in results) + + +def test_list_dashboards_pages(): + client = DashboardsServiceClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.list_dashboards), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + dashboards_service.ListDashboardsResponse( + dashboards=[ + dashboard.Dashboard(), + dashboard.Dashboard(), + dashboard.Dashboard(), + ], + next_page_token="abc", + ), + dashboards_service.ListDashboardsResponse( + dashboards=[], next_page_token="def", + ), + dashboards_service.ListDashboardsResponse( + dashboards=[dashboard.Dashboard(),], next_page_token="ghi", + ), + dashboards_service.ListDashboardsResponse( + dashboards=[dashboard.Dashboard(), dashboard.Dashboard(),], + ), + RuntimeError, + ) + pages = list(client.list_dashboards(request={}).pages) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_dashboards_async_pager(): + client = DashboardsServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_dashboards), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + dashboards_service.ListDashboardsResponse( + dashboards=[ + dashboard.Dashboard(), + dashboard.Dashboard(), + dashboard.Dashboard(), + ], + next_page_token="abc", + ), + dashboards_service.ListDashboardsResponse( + dashboards=[], next_page_token="def", + ), + dashboards_service.ListDashboardsResponse( + dashboards=[dashboard.Dashboard(),], next_page_token="ghi", + ), + dashboards_service.ListDashboardsResponse( + dashboards=[dashboard.Dashboard(), dashboard.Dashboard(),], + ), + RuntimeError, + ) + async_pager = await client.list_dashboards(request={},) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, dashboard.Dashboard) for i in responses) + + +@pytest.mark.asyncio +async def test_list_dashboards_async_pages(): + client = DashboardsServiceAsyncClient(credentials=credentials.AnonymousCredentials,) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.list_dashboards), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + dashboards_service.ListDashboardsResponse( + dashboards=[ + dashboard.Dashboard(), + dashboard.Dashboard(), + dashboard.Dashboard(), + ], + next_page_token="abc", + ), + dashboards_service.ListDashboardsResponse( + dashboards=[], next_page_token="def", + ), + dashboards_service.ListDashboardsResponse( + dashboards=[dashboard.Dashboard(),], next_page_token="ghi", + ), + dashboards_service.ListDashboardsResponse( + dashboards=[dashboard.Dashboard(), dashboard.Dashboard(),], + ), + RuntimeError, + ) + pages = [] + async for page in (await client.list_dashboards(request={})).pages: + pages.append(page) + for page, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page.raw_page.next_page_token == token + + +def test_get_dashboard(transport: str = "grpc"): + client = DashboardsServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dashboards_service.GetDashboardRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_dashboard), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = dashboard.Dashboard( + name="name_value", + display_name="display_name_value", + etag="etag_value", + grid_layout=layouts.GridLayout(columns=769), + ) + + response = client.get_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dashboard.Dashboard) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_get_dashboard_async(transport: str = "grpc_asyncio"): + client = DashboardsServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dashboards_service.GetDashboardRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_dashboard), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dashboard.Dashboard( + name="name_value", display_name="display_name_value", etag="etag_value", + ) + ) + + response = await client.get_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dashboard.Dashboard) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.etag == "etag_value" + + +def test_get_dashboard_field_headers(): + client = DashboardsServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = dashboards_service.GetDashboardRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client._transport.get_dashboard), "__call__") as call: + call.return_value = dashboard.Dashboard() + + client.get_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_dashboard_field_headers_async(): + client = DashboardsServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dashboards_service.GetDashboardRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.get_dashboard), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dashboard.Dashboard()) + + await client.get_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_delete_dashboard(transport: str = "grpc"): + client = DashboardsServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dashboards_service.DeleteDashboardRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_dashboard), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_dashboard_async(transport: str = "grpc_asyncio"): + client = DashboardsServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dashboards_service.DeleteDashboardRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_dashboard), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + response = await client.delete_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_dashboard_field_headers(): + client = DashboardsServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dashboards_service.DeleteDashboardRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.delete_dashboard), "__call__" + ) as call: + call.return_value = None + + client.delete_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_dashboard_field_headers_async(): + client = DashboardsServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dashboards_service.DeleteDashboardRequest() + request.name = "name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.delete_dashboard), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + + await client.delete_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + + +def test_update_dashboard(transport: str = "grpc"): + client = DashboardsServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = dashboards_service.UpdateDashboardRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_dashboard), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = dashboard.Dashboard( + name="name_value", + display_name="display_name_value", + etag="etag_value", + grid_layout=layouts.GridLayout(columns=769), + ) + + response = client.update_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dashboard.Dashboard) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.etag == "etag_value" + + +@pytest.mark.asyncio +async def test_update_dashboard_async(transport: str = "grpc_asyncio"): + client = DashboardsServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = dashboards_service.UpdateDashboardRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_dashboard), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + dashboard.Dashboard( + name="name_value", display_name="display_name_value", etag="etag_value", + ) + ) + + response = await client.update_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, dashboard.Dashboard) + + assert response.name == "name_value" + + assert response.display_name == "display_name_value" + + assert response.etag == "etag_value" + + +def test_update_dashboard_field_headers(): + client = DashboardsServiceClient(credentials=credentials.AnonymousCredentials(),) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dashboards_service.UpdateDashboardRequest() + request.dashboard.name = "dashboard.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._transport.update_dashboard), "__call__" + ) as call: + call.return_value = dashboard.Dashboard() + + client.update_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "dashboard.name=dashboard.name/value",) in kw[ + "metadata" + ] + + +@pytest.mark.asyncio +async def test_update_dashboard_field_headers_async(): + client = DashboardsServiceAsyncClient( + credentials=credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = dashboards_service.UpdateDashboardRequest() + request.dashboard.name = "dashboard.name/value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client._client._transport.update_dashboard), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(dashboard.Dashboard()) + + await client.update_dashboard(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ("x-goog-request-params", "dashboard.name=dashboard.name/value",) in kw[ + "metadata" + ] + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.DashboardsServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DashboardsServiceClient( + credentials=credentials.AnonymousCredentials(), transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. 
+ transport = transports.DashboardsServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DashboardsServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DashboardsServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DashboardsServiceClient( + client_options={"scopes": ["1", "2"]}, transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DashboardsServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + client = DashboardsServiceClient(transport=transport) + assert client._transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.DashboardsServiceGrpcTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DashboardsServiceGrpcAsyncIOTransport( + credentials=credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DashboardsServiceClient(credentials=credentials.AnonymousCredentials(),) + assert isinstance(client._transport, transports.DashboardsServiceGrpcTransport,) + + +def test_dashboards_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(exceptions.DuplicateCredentialArgs): + transport = transports.DashboardsServiceTransport( + credentials=credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_dashboards_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.monitoring_dashboard_v1.services.dashboards_service.transports.DashboardsServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DashboardsServiceTransport( + credentials=credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_dashboard", + "list_dashboards", + "get_dashboard", + "delete_dashboard", + "update_dashboard", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + + def test_dashboards_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object( + auth, "load_credentials_from_file" + ) as load_creds, mock.patch( + "google.cloud.monitoring_dashboard_v1.services.dashboards_service.transports.DashboardsServiceTransport._prep_wrapped_messages" + ) as Transport: + Transport.return_value = None + load_creds.return_value = (credentials.AnonymousCredentials(), None) + transport = transports.DashboardsServiceTransport( + credentials_file="credentials.json", quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/monitoring", + "https://www.googleapis.com/auth/monitoring.read", + "https://www.googleapis.com/auth/monitoring.write", + ), + quota_project_id="octopus", + ) + + + def test_dashboards_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + DashboardsServiceClient() + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/monitoring", + "https://www.googleapis.com/auth/monitoring.read", + "https://www.googleapis.com/auth/monitoring.write", + ), + quota_project_id=None, + ) + + + def test_dashboards_service_transport_auth_adc(): + # If credentials are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(auth, "default") as adc: + adc.return_value = (credentials.AnonymousCredentials(), None) + transports.DashboardsServiceGrpcTransport( + host="squid.clam.whelk", quota_project_id="octopus" + ) + adc.assert_called_once_with( + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/monitoring", + "https://www.googleapis.com/auth/monitoring.read", + "https://www.googleapis.com/auth/monitoring.write", + ), + quota_project_id="octopus", + ) + + + def test_dashboards_service_host_no_port(): + client = DashboardsServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="monitoring.googleapis.com" + ), + ) + assert client._transport._host == "monitoring.googleapis.com:443" + + + def test_dashboards_service_host_with_port(): + client = DashboardsServiceClient( + credentials=credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="monitoring.googleapis.com:8000" + ), + ) + assert client._transport._host == "monitoring.googleapis.com:8000" + + + def test_dashboards_service_grpc_transport_channel(): + channel = grpc.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used.
+ callback = mock.MagicMock() + transport = transports.DashboardsServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +def test_dashboards_service_grpc_asyncio_transport_channel(): + channel = aio.insecure_channel("http://localhost/") + + # Check that if channel is provided, mtls endpoint and client_cert_source + # won't be used. + callback = mock.MagicMock() + transport = transports.DashboardsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=callback, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert not callback.called + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_dashboards_service_grpc_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. + mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.DashboardsServiceGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/monitoring", + "https://www.googleapis.com/auth/monitoring.read", + "https://www.googleapis.com/auth/monitoring.write", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@mock.patch("grpc.ssl_channel_credentials", autospec=True) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_dashboards_service_grpc_asyncio_transport_channel_mtls_with_client_cert_source( + grpc_create_channel, grpc_ssl_channel_cred +): + # Check that if channel is None, but api_mtls_endpoint and client_cert_source + # are provided, then a mTLS channel will be created. 
+ mock_cred = mock.Mock() + + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + transport = transports.DashboardsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/monitoring", + "https://www.googleapis.com/auth/monitoring.read", + "https://www.googleapis.com/auth/monitoring.write", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True) +def test_dashboards_service_grpc_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.DashboardsServiceGrpcTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/monitoring", + "https://www.googleapis.com/auth/monitoring.read", + "https://www.googleapis.com/auth/monitoring.write", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +@pytest.mark.parametrize( + "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"] +) +@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True) +def test_dashboards_service_grpc_asyncio_transport_channel_mtls_with_adc( + grpc_create_channel, api_mtls_endpoint +): + # Check that if channel and client_cert_source are None, but api_mtls_endpoint + # is provided, then a mTLS channel will be created with SSL ADC. + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + # Mock google.auth.transport.grpc.SslCredentials class. 
+ mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + mock_cred = mock.Mock() + transport = transports.DashboardsServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint=api_mtls_endpoint, + client_cert_source=None, + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=( + "https://www.googleapis.com/auth/cloud-platform", + "https://www.googleapis.com/auth/monitoring", + "https://www.googleapis.com/auth/monitoring.read", + "https://www.googleapis.com/auth/monitoring.write", + ), + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_dashboard_path(): + project = "squid" + dashboard = "clam" + + expected = "projects/{project}/dashboards/{dashboard}".format( + project=project, dashboard=dashboard, + ) + actual = DashboardsServiceClient.dashboard_path(project, dashboard) + assert expected == actual + + +def test_parse_dashboard_path(): + expected = { + "project": "whelk", + "dashboard": "octopus", + } + path = DashboardsServiceClient.dashboard_path(**expected) + + # Check that the path construction is reversible. + actual = DashboardsServiceClient.parse_dashboard_path(path) + assert expected == actual diff --git a/tests/unit/gapic/v1/test_dashboards_service_client_v1.py b/tests/unit/gapic/v1/test_dashboards_service_client_v1.py deleted file mode 100644 index 1e0f376..0000000 --- a/tests/unit/gapic/v1/test_dashboards_service_client_v1.py +++ /dev/null @@ -1,268 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
- -"""Unit tests.""" - -import mock -import pytest - -from google.cloud.monitoring_dashboard import v1 -from google.cloud.monitoring_dashboard.v1.proto import dashboard_pb2 -from google.cloud.monitoring_dashboard.v1.proto import dashboards_service_pb2 -from google.protobuf import empty_pb2 - - -class MultiCallableStub(object): - """Stub for the grpc.UnaryUnaryMultiCallable interface.""" - - def __init__(self, method, channel_stub): - self.method = method - self.channel_stub = channel_stub - - def __call__(self, request, timeout=None, metadata=None, credentials=None): - self.channel_stub.requests.append((self.method, request)) - - response = None - if self.channel_stub.responses: - response = self.channel_stub.responses.pop() - - if isinstance(response, Exception): - raise response - - if response: - return response - - -class ChannelStub(object): - """Stub for the grpc.Channel interface.""" - - def __init__(self, responses=[]): - self.responses = responses - self.requests = [] - - def unary_unary(self, method, request_serializer=None, response_deserializer=None): - return MultiCallableStub(method, self) - - -class CustomException(Exception): - pass - - -class TestDashboardsServiceClient(object): - def test_create_dashboard(self): - # Setup Expected Response - name = "name3373707" - display_name = "displayName1615086568" - etag = "etag3123477" - expected_response = {"name": name, "display_name": display_name, "etag": etag} - expected_response = dashboard_pb2.Dashboard(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = v1.DashboardsServiceClient() - - # Setup Request - parent = "parent-995424086" - dashboard = {} - - response = client.create_dashboard(parent, dashboard) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dashboards_service_pb2.CreateDashboardRequest( - parent=parent, dashboard=dashboard - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_create_dashboard_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = v1.DashboardsServiceClient() - - # Setup request - parent = "parent-995424086" - dashboard = {} - - with pytest.raises(CustomException): - client.create_dashboard(parent, dashboard) - - def test_list_dashboards(self): - # Setup Expected Response - next_page_token = "" - dashboards_element = {} - dashboards = [dashboards_element] - expected_response = { - "next_page_token": next_page_token, - "dashboards": dashboards, - } - expected_response = dashboards_service_pb2.ListDashboardsResponse( - **expected_response - ) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = v1.DashboardsServiceClient() - - # Setup Request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_dashboards(parent) - resources = list(paged_list_response) - assert len(resources) == 1 - - assert expected_response.dashboards[0] == resources[0] - - assert len(channel.requests) == 1 - expected_request = 
dashboards_service_pb2.ListDashboardsRequest(parent=parent) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_list_dashboards_exception(self): - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = v1.DashboardsServiceClient() - - # Setup request - parent = client.project_path("[PROJECT]") - - paged_list_response = client.list_dashboards(parent) - with pytest.raises(CustomException): - list(paged_list_response) - - def test_get_dashboard(self): - # Setup Expected Response - name_2 = "name2-1052831874" - display_name = "displayName1615086568" - etag = "etag3123477" - expected_response = {"name": name_2, "display_name": display_name, "etag": etag} - expected_response = dashboard_pb2.Dashboard(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = v1.DashboardsServiceClient() - - # Setup Request - name = client.dashboard_path("[PROJECT]", "[DASHBOARD]") - - response = client.get_dashboard(name) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dashboards_service_pb2.GetDashboardRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_get_dashboard_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = v1.DashboardsServiceClient() - - # Setup request - name = client.dashboard_path("[PROJECT]", "[DASHBOARD]") - - with pytest.raises(CustomException): - client.get_dashboard(name) - - def test_delete_dashboard(self): - channel = ChannelStub() - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = v1.DashboardsServiceClient() - - # Setup Request - name = client.dashboard_path("[PROJECT]", "[DASHBOARD]") - - client.delete_dashboard(name) - - assert len(channel.requests) == 1 - expected_request = dashboards_service_pb2.DeleteDashboardRequest(name=name) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_delete_dashboard_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = v1.DashboardsServiceClient() - - # Setup request - name = client.dashboard_path("[PROJECT]", "[DASHBOARD]") - - with pytest.raises(CustomException): - client.delete_dashboard(name) - - def test_update_dashboard(self): - # Setup Expected Response - name = "name3373707" - display_name = "displayName1615086568" - etag = "etag3123477" - expected_response = {"name": name, "display_name": display_name, "etag": etag} - expected_response = dashboard_pb2.Dashboard(**expected_response) - - # Mock the API response - channel = ChannelStub(responses=[expected_response]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = 
v1.DashboardsServiceClient() - - # Setup Request - dashboard = {} - - response = client.update_dashboard(dashboard) - assert expected_response == response - - assert len(channel.requests) == 1 - expected_request = dashboards_service_pb2.UpdateDashboardRequest( - dashboard=dashboard - ) - actual_request = channel.requests[0][1] - assert expected_request == actual_request - - def test_update_dashboard_exception(self): - # Mock the API response - channel = ChannelStub(responses=[CustomException()]) - patch = mock.patch("google.api_core.grpc_helpers.create_channel") - with patch as create_channel: - create_channel.return_value = channel - client = v1.DashboardsServiceClient() - - # Setup request - dashboard = {} - - with pytest.raises(CustomException): - client.update_dashboard(dashboard)