
Commit

fix: fix as_dataframe (#91)
busunkim96 committed Mar 12, 2021
1 parent 4cdb1ff commit f135202
Showing 5 changed files with 71 additions and 34 deletions.
18 changes: 12 additions & 6 deletions google/cloud/monitoring_v3/_dataframe.py
@@ -21,14 +21,15 @@
except ImportError: # pragma: NO COVER
pandas = None

from google.cloud.monitoring_v3.types import TimeSeries
from google.cloud import monitoring_v3


TOP_RESOURCE_LABELS = ("project_id", "aws_account", "location", "region", "zone")


def _extract_header(time_series):
"""Return a copy of time_series with the points removed."""
return TimeSeries(
return monitoring_v3.TimeSeries(
metric=time_series.metric,
resource=time_series.resource,
metric_kind=time_series.metric_kind,
@@ -46,15 +47,19 @@ def _extract_labels(time_series):

def _extract_value(typed_value):
"""Extract the value from a TypedValue."""
value_type = typed_value.WhichOneof("value")
return typed_value.__getattribute__(value_type)
# There is no equivalent of WhichOneof in proto-plus
# This may break if the field names have been altered in the
# proto-plus representation
# https://github.com/googleapis/proto-plus-python/issues/137
value_type = monitoring_v3.TypedValue.pb(typed_value).WhichOneof("value")
return getattr(typed_value, value_type)
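
proto-plus wraps the generated protobuf messages and does not re-expose `WhichOneof`, so the helper now drops down to the raw message through the class-level `pb()` accessor before asking which oneof member is set. A minimal, standalone sketch of that lookup (the sample value is made up):

```python
# Standalone sketch of the oneof lookup above; the sample value is made up.
from google.cloud import monitoring_v3

typed_value = monitoring_v3.TypedValue(double_value=3.5)

# The proto-plus wrapper has no WhichOneof, but the underlying protobuf does.
field = monitoring_v3.TypedValue.pb(typed_value).WhichOneof("value")
print(field)                        # "double_value"
print(getattr(typed_value, field))  # 3.5
```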


def _build_dataframe(time_series_iterable, label=None, labels=None): # pragma: NO COVER
"""Build a :mod:`pandas` dataframe out of time series.
:type time_series_iterable:
iterable over :class:`~google.cloud.monitoring_v3.types.TimeSeries`
iterable over :class:`~google.cloud.monitoring_v3.TimeSeries`
:param time_series_iterable:
An iterable (e.g., a query object) yielding time series.
@@ -95,7 +100,8 @@ def _build_dataframe(time_series_iterable, label=None, labels=None): # pragma:
pandas_series = pandas.Series(
data=[_extract_value(point.value) for point in time_series.points],
index=[
point.interval.end_time.ToNanoseconds() for point in time_series.points
point.interval.end_time.timestamp_pb().ToNanoseconds()
for point in time_series.points
],
)
columns.append(pandas_series)
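
The other behavioural change in this file is the dataframe index: under proto-plus, `end_time` comes back as a `DatetimeWithNanoseconds` (a `datetime` subclass) rather than a protobuf `Timestamp`, so `ToNanoseconds()` is only reachable after converting with `timestamp_pb()`. A hedged sketch with an illustrative point (2021-03-12T00:00:00Z):

```python
# Sketch of the index conversion above; the epoch value is illustrative.
from google.cloud import monitoring_v3

point = monitoring_v3.Point(
    {
        "interval": {"end_time": {"seconds": 1615507200}},
        "value": {"double_value": 1.5},
    }
)

# end_time is a DatetimeWithNanoseconds, not a protobuf Timestamp, so recover
# the Timestamp first and then take nanoseconds for the dataframe index.
end_time = point.interval.end_time
print(end_time.timestamp_pb().ToNanoseconds())  # 1615507200000000000
```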
36 changes: 32 additions & 4 deletions noxfile.py
@@ -30,6 +30,17 @@
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]

# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
nox.options.sessions = [
"unit",
"system",
"cover",
"lint",
"lint_setup_py",
"blacken",
"docs",
]


@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
@@ -75,12 +86,14 @@ def default(session):
session.install(
"mock", "pytest", "pytest-cov",
)
session.install("-e", ".")

session.install("-e", ".[pandas]")

# Run py.test against the unit tests.
session.run(
"py.test",
"--quiet",
f"--junitxml=unit_{session.python}_sponge_log.xml",
"--cov=google/cloud",
"--cov=tests/unit",
"--cov-append",
@@ -110,6 +123,9 @@ def system(session):
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
# Install pyopenssl for mTLS testing.
if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
session.install("pyopenssl")

system_test_exists = os.path.exists(system_test_path)
system_test_folder_exists = os.path.exists(system_test_folder_path)
@@ -125,13 +141,25 @@
session.install(
"mock", "pytest", "google-cloud-testutils",
)
session.install("-e", ".")
session.install("-e", ".[pandas]")

# Run py.test against the system tests.
if system_test_exists:
session.run("py.test", "--quiet", system_test_path, *session.posargs)
session.run(
"py.test",
"--quiet",
f"--junitxml=system_{session.python}_sponge_log.xml",
system_test_path,
*session.posargs,
)
if system_test_folder_exists:
session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
session.run(
"py.test",
"--quiet",
f"--junitxml=system_{session.python}_sponge_log.xml",
system_test_folder_path,
*session.posargs,
)


@nox.session(python=DEFAULT_PYTHON_VERSION)
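Taken together, the noxfile changes declare an explicit default session list, install the library with its `pandas` extra so the dataframe code is actually imported during tests, and emit JUnit XML logs for each run. A condensed, hypothetical noxfile showing the same pattern (session names and pinned versions here are placeholders, not this repository's full configuration):

```python
# hypothetical_noxfile.py -- condensed sketch of the pattern used above.
import nox

# A bare `nox` invocation runs only the sessions listed here; anything omitted
# (like "docfx" in this repo) must be requested explicitly with `nox -s`.
nox.options.sessions = ["unit", "lint"]


@nox.session(python=["3.6", "3.7", "3.8", "3.9"])
def unit(session):
    session.install("mock", "pytest", "pytest-cov")
    # ".[pandas]" pulls in the optional pandas dependency declared in setup.py,
    # so the _dataframe helpers are exercised instead of silently skipped.
    session.install("-e", ".[pandas]")
    session.run(
        "py.test",
        "--quiet",
        f"--junitxml=unit_{session.python}_sponge_log.xml",
        *session.posargs,
    )


@nox.session(python="3.8")
def lint(session):
    session.install("flake8")
    session.run("flake8", "google", "tests")
```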
6 changes: 3 additions & 3 deletions samples/snippets/v3/cloud-client/snippets.py
@@ -18,8 +18,8 @@
import time
import uuid

from google.api import metric_pb2 as ga_metric
from google.api import label_pb2 as ga_label
from google.api import metric_pb2 as ga_metric
from google.cloud import monitoring_v3


@@ -35,13 +35,13 @@ def create_metric_descriptor(project_id):
descriptor.metric_kind = ga_metric.MetricDescriptor.MetricKind.GAUGE
descriptor.value_type = ga_metric.MetricDescriptor.ValueType.DOUBLE
descriptor.description = "This is a simple example of a custom metric."

labels = ga_label.LabelDescriptor()
labels.key = "TestLabel"
labels.value_type = ga_label.LabelDescriptor.ValueType.STRING
labels.description = "This is a test label"
descriptor.labels.append(labels)

descriptor = client.create_metric_descriptor(
name=project_name, metric_descriptor=descriptor
)
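The sample change itself is only an import reorder (`ga_label` sorted ahead of `ga_metric`), but for context, points written against a descriptor like the one above are what eventually flow through the fixed dataframe path. A hedged sketch of such a write; the metric type, resource labels, and project are placeholders, and running it requires application default credentials:

```python
# Illustrative write against a custom metric (not part of this commit);
# the metric type, resource labels, and project below are placeholders.
import time

from google.cloud import monitoring_v3

client = monitoring_v3.MetricServiceClient()
project_name = "projects/my-project"

series = monitoring_v3.TimeSeries()
series.metric.type = "custom.googleapis.com/my_metric"
series.resource.type = "gce_instance"
series.resource.labels["instance_id"] = "1234567890123456789"
series.resource.labels["zone"] = "us-central1-f"

now = time.time()
seconds = int(now)
nanos = int((now - seconds) * 10 ** 9)
interval = monitoring_v3.TimeInterval(
    {"end_time": {"seconds": seconds, "nanos": nanos}}
)
point = monitoring_v3.Point({"interval": interval, "value": {"double_value": 3.14}})
series.points = [point]

client.create_time_series(name=project_name, time_series=[series])
```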
7 changes: 5 additions & 2 deletions synth.py
@@ -93,16 +93,19 @@
templated_files = common.py_library(
samples=True, # set to True only if there are samples
microgenerator=True,
unit_test_extras=["pandas"],
system_test_extras=["pandas"],
cov_level=99
)
s.move(templated_files, excludes=[".coveragerc"]) # microgenerator has a good .coveragerc file

# Don't treat docs (sphinx) warnings as errors.
s.replace("noxfile.py", '[\"\']-W[\"\']', '# "-W"')

# ----------------------------------------------------------------------------
# Samples templates
# ----------------------------------------------------------------------------
python.py_samples(skip_readmes=True)

# Don't treat warnings as errors.
s.replace("noxfile.py", '[\"\']-W[\"\']', '# "-W"')

s.shell.run(["nox", "-s", "blacken"], hide_output=False)
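
The `unit_test_extras=["pandas"]` and `system_test_extras=["pandas"]` arguments are what make the generated noxfile install `.[pandas]`; for that extra to resolve, setup.py has to declare it. A sketch of the relevant `extras_require` entry, assuming the usual layout (the exact version pin in this repository may differ):

```python
# setup.py fragment -- sketch of the extra that ".[pandas]" refers to;
# the exact version constraint in this repository may differ.
import setuptools

setuptools.setup(
    name="google-cloud-monitoring",
    # ... other arguments elided ...
    extras_require={"pandas": ["pandas >= 0.23.2"]},
)
```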
38 changes: 19 additions & 19 deletions tests/unit/test__dataframe.py
@@ -12,15 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.

try:
import pandas
except ImportError:
HAVE_PANDAS = False
else:
HAVE_PANDAS = True # pragma: NO COVER

import pandas
import unittest

from google.api import metric_pb2
from google.api import monitored_resource_pb2
from google.api_core import datetime_helpers
from google.cloud import monitoring_v3
from google.cloud.monitoring_v3 import _dataframe


PROJECT = "my-project"

@@ -58,20 +59,22 @@ def parse_timestamps():


def generate_query_results():
from google.cloud.monitoring_v3 import types

def P(timestamp, value):
interval = types.TimeInterval()
interval.start_time.FromJsonString(timestamp)
interval.end_time.FromJsonString(timestamp)
return types.Point(interval=interval, value={"double_value": value})
interval = monitoring_v3.TimeInterval()
interval.start_time = datetime_helpers.from_rfc3339(timestamp).replace(
tzinfo=None
)
interval.end_time = datetime_helpers.from_rfc3339(timestamp).replace(
tzinfo=None
)
return monitoring_v3.Point(interval=interval, value={"double_value": value})

for metric_labels, resource_labels, value in zip(
METRIC_LABELS, RESOURCE_LABELS, VALUES
):
yield types.TimeSeries(
metric=types.Metric(type=METRIC_TYPE, labels=metric_labels),
resource=types.MonitoredResource(
yield monitoring_v3.TimeSeries(
metric=metric_pb2.Metric(type=METRIC_TYPE, labels=metric_labels),
resource=monitored_resource_pb2.MonitoredResource(
type=RESOURCE_TYPE, labels=resource_labels
),
metric_kind=METRIC_KIND,
@@ -80,12 +83,9 @@ def P(timestamp, value):
)


@unittest.skipUnless(HAVE_PANDAS, "No pandas")
class Test__build_dataframe(unittest.TestCase):
def _call_fut(self, *args, **kwargs):
from google.cloud.monitoring_v3._dataframe import _build_dataframe

return _build_dataframe(*args, **kwargs)
return _dataframe._build_dataframe(*args, **kwargs)

def test_both_label_and_labels_illegal(self):
with self.assertRaises(ValueError):
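For completeness, a hedged sketch of the public path this commit repairs: `Query.as_dataframe` in `google.cloud.monitoring_v3.query` drives `_build_dataframe` under the hood. The project, metric type, and label below are illustrative, and running it needs real credentials and monitoring data:

```python
# End-to-end usage sketch (not part of this commit); project, metric type,
# and label are illustrative, and credentials are required to run it.
from google.cloud import monitoring_v3
from google.cloud.monitoring_v3 import query

client = monitoring_v3.MetricServiceClient()
q = query.Query(
    client,
    project="my-project",
    metric_type="compute.googleapis.com/instance/cpu/utilization",
    hours=1,
)
dataframe = q.as_dataframe(label="instance_name")
print(dataframe.head())
```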
