This repository has been archived by the owner on Dec 31, 2023. It is now read-only.

fix: fix as_dataframe #91

Merged on Mar 12, 2021 (8 commits)

Changes from 3 commits
19 changes: 11 additions & 8 deletions google/cloud/monitoring_v3/_dataframe.py
@@ -21,14 +21,15 @@
except ImportError: # pragma: NO COVER
pandas = None

from google.cloud.monitoring_v3.types import TimeSeries
from google.cloud import monitoring_v3


TOP_RESOURCE_LABELS = ("project_id", "aws_account", "location", "region", "zone")


def _extract_header(time_series):
"""Return a copy of time_series with the points removed."""
return TimeSeries(
return monitoring_v3.TimeSeries(
metric=time_series.metric,
resource=time_series.resource,
metric_kind=time_series.metric_kind,
@@ -46,15 +47,19 @@ def _extract_labels(time_series):

def _extract_value(typed_value):
"""Extract the value from a TypedValue."""
value_type = typed_value.WhichOneof("value")
return typed_value.__getattribute__(value_type)
# There is no equivalent of WhichOneOf in proto-plus
# This may break if the field names have been altered in the
# proto-plus representation
# https://github.com/googleapis/proto-plus-python/issues/137
value_type = monitoring_v3.TypedValue.pb(typed_value).WhichOneof("value")
return getattr(typed_value, value_type)


def _build_dataframe(time_series_iterable, label=None, labels=None): # pragma: NO COVER
"""Build a :mod:`pandas` dataframe out of time series.

:type time_series_iterable:
iterable over :class:`~google.cloud.monitoring_v3.types.TimeSeries`
iterable over :class:`~google.cloud.monitoring_v3.TimeSeries`
:param time_series_iterable:
An iterable (e.g., a query object) yielding time series.

@@ -94,9 +99,7 @@ def _build_dataframe(time_series_iterable, label=None, labels=None): # pragma:
for time_series in time_series_iterable:
pandas_series = pandas.Series(
data=[_extract_value(point.value) for point in time_series.points],
index=[
point.interval.end_time.ToNanoseconds() for point in time_series.points
],
index=[point.interval.end_time for point in time_series.points],
)
columns.append(pandas_series)
headers.append(_extract_header(time_series))
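
The _extract_value change above works around the missing WhichOneof helper on proto-plus wrappers by dropping to the underlying protobuf message. A minimal standalone sketch of the same idea (not part of the PR; the example value is illustrative and assumes google-cloud-monitoring v2+ with its proto-plus types):

```python
from google.cloud import monitoring_v3

# A TypedValue with exactly one field of its "value" oneof populated.
typed_value = monitoring_v3.TypedValue(double_value=3.14)

# proto-plus wrappers expose the raw protobuf via the .pb() classmethod;
# the raw message still supports WhichOneof, which names the populated field.
field_name = monitoring_v3.TypedValue.pb(typed_value).WhichOneof("value")

# Read that field back off the proto-plus wrapper.
print(field_name, getattr(typed_value, field_name))  # double_value 3.14
```
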
36 changes: 32 additions & 4 deletions noxfile.py
@@ -30,6 +30,17 @@
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]

# 'docfx' is excluded since it only needs to run in 'docs-presubmit'
nox.options.sessions = [
"unit",
"system",
"cover",
"lint",
"lint_setup_py",
"blacken",
"docs",
]


@nox.session(python=DEFAULT_PYTHON_VERSION)
def lint(session):
@@ -75,12 +86,14 @@ def default(session):
session.install(
"mock", "pytest", "pytest-cov",
)
session.install("-e", ".")

session.install("-e", ".[pandas]")

# Run py.test against the unit tests.
session.run(
"py.test",
"--quiet",
f"--junitxml=unit_{session.python}_sponge_log.xml",
"--cov=google/cloud",
"--cov=tests/unit",
"--cov-append",
@@ -110,6 +123,9 @@ def system(session):
# Sanity check: Only run tests if the environment variable is set.
if not os.environ.get("GOOGLE_APPLICATION_CREDENTIALS", ""):
session.skip("Credentials must be set via environment variable")
# Install pyopenssl for mTLS testing.
if os.environ.get("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true":
session.install("pyopenssl")

system_test_exists = os.path.exists(system_test_path)
system_test_folder_exists = os.path.exists(system_test_folder_path)
@@ -125,13 +141,25 @@
session.install(
"mock", "pytest", "google-cloud-testutils",
)
session.install("-e", ".")
session.install("-e", ".[pandas]")

# Run py.test against the system tests.
if system_test_exists:
session.run("py.test", "--quiet", system_test_path, *session.posargs)
session.run(
"py.test",
"--quiet",
f"--junitxml=system_{session.python}_sponge_log.xml",
system_test_path,
*session.posargs,
)
if system_test_folder_exists:
session.run("py.test", "--quiet", system_test_folder_path, *session.posargs)
session.run(
"py.test",
"--quiet",
f"--junitxml=system_{session.python}_sponge_log.xml",
system_test_folder_path,
*session.posargs,
)


@nox.session(python=DEFAULT_PYTHON_VERSION)
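
The switch from session.install("-e", ".") to session.install("-e", ".[pandas]") in the noxfile relies on an optional-dependency extra declared in setup.py. That stanza is not part of this diff; the following is a hypothetical sketch only, and the library's real setup.py (names, pins, other arguments) may differ:

```python
# setup.py (illustrative sketch, not from this PR): an extras_require entry is what
# makes `pip install -e .[pandas]` additionally install pandas for the test sessions.
from setuptools import find_packages, setup

setup(
    name="google-cloud-monitoring",
    packages=find_packages(),
    extras_require={
        "pandas": ["pandas"],  # installed only when the [pandas] extra is requested
    },
)
```
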
7 changes: 5 additions & 2 deletions synth.py
@@ -93,16 +93,19 @@
templated_files = common.py_library(
samples=True, # set to True only if there are samples
microgenerator=True,
unit_test_extras=["pandas"],
system_test_extras=["pandas"],
cov_level=99
)
s.move(templated_files, excludes=[".coveragerc"]) # microgenerator has a good .coveragerc file

# Don't treat warnings as errors.
busunkim96 marked this conversation as resolved.
s.replace("noxfile.py", '[\"\']-W[\"\']', '# "-W"')

# ----------------------------------------------------------------------------
# Samples templates
# ----------------------------------------------------------------------------
python.py_samples(skip_readmes=True)

# Don't treat warnings as errors.
s.replace("noxfile.py", '[\"\']-W[\"\']', '# "-W"')

s.shell.run(["nox", "-s", "blacken"], hide_output=False)
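For readers unfamiliar with synthtool, s.replace(path, pattern, replacement) performs a regex substitution over the generated file. The call above that comments out "-W" is therefore roughly equivalent to the hand-rolled sketch below (assumption: the templated noxfile.py passes "-W", the treat-warnings-as-errors flag, as a quoted sphinx-build argument):

```python
# Rough, hand-rolled equivalent of the synthtool call
#   s.replace("noxfile.py", '[\"\']-W[\"\']', '# "-W"')
# It rewrites the quoted "-W" argument in the generated noxfile.py into a comment
# so the docs build no longer fails on Sphinx warnings.
import re

with open("noxfile.py") as fh:
    contents = fh.read()

contents = re.sub(r'["\']-W["\']', '# "-W"', contents)

with open("noxfile.py", "w") as fh:
    fh.write(contents)
```
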
42 changes: 20 additions & 22 deletions tests/unit/test__dataframe.py
@@ -12,15 +12,16 @@
# See the License for the specific language governing permissions and
# limitations under the License.

try:
import pandas
except ImportError:
HAVE_PANDAS = False
else:
HAVE_PANDAS = True # pragma: NO COVER

import pandas
import unittest

from google.api import metric_pb2
from google.api import monitored_resource_pb2
from google.api_core import datetime_helpers
from google.cloud import monitoring_v3
from google.cloud.monitoring_v3 import _dataframe


PROJECT = "my-project"

@@ -52,26 +53,26 @@


def parse_timestamps():
from google.api_core import datetime_helpers

return [datetime_helpers.from_rfc3339(t).replace(tzinfo=None) for t in TIMESTAMPS]
return [pandas.Timestamp(t) for t in TIMESTAMPS]
Review comment from the contributor (author): pandas seems to have its own timestamp type (https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.Timestamp.html), so I'm not sure how these tests used to work.



def generate_query_results():
from google.cloud.monitoring_v3 import types

def P(timestamp, value):
interval = types.TimeInterval()
interval.start_time.FromJsonString(timestamp)
interval.end_time.FromJsonString(timestamp)
return types.Point(interval=interval, value={"double_value": value})
interval = monitoring_v3.TimeInterval()
interval.start_time = datetime_helpers.from_rfc3339(timestamp).replace(
tzinfo=None
)
interval.end_time = datetime_helpers.from_rfc3339(timestamp).replace(
tzinfo=None
)
return monitoring_v3.Point(interval=interval, value={"double_value": value})

for metric_labels, resource_labels, value in zip(
METRIC_LABELS, RESOURCE_LABELS, VALUES
):
yield types.TimeSeries(
metric=types.Metric(type=METRIC_TYPE, labels=metric_labels),
resource=types.MonitoredResource(
yield monitoring_v3.TimeSeries(
metric=metric_pb2.Metric(type=METRIC_TYPE, labels=metric_labels),
resource=monitored_resource_pb2.MonitoredResource(
type=RESOURCE_TYPE, labels=resource_labels
),
metric_kind=METRIC_KIND,
@@ -80,12 +81,9 @@ def P(timestamp, value):
)


@unittest.skipUnless(HAVE_PANDAS, "No pandas")
class Test__build_dataframe(unittest.TestCase):
def _call_fut(self, *args, **kwargs):
from google.cloud.monitoring_v3._dataframe import _build_dataframe

return _build_dataframe(*args, **kwargs)
return _dataframe._build_dataframe(*args, **kwargs)

def test_both_label_and_labels_illegal(self):
with self.assertRaises(ValueError):
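
The rewritten parse_timestamps builds the expected index from pandas.Timestamp because that is what pandas stores once _build_dataframe supplies datetime objects as a Series index. A small standalone sketch of that behavior (assumes pandas is installed; the timestamp values are illustrative, not taken from the test constants):

```python
import datetime

import pandas

# Two naive datetimes standing in for point interval end times.
index = [
    datetime.datetime(2016, 4, 6, 22, 5, 0),
    datetime.datetime(2016, 4, 6, 22, 5, 1),
]
series = pandas.Series([0.1, 0.2], index=index)

# pandas normalizes a datetime-valued index into a DatetimeIndex of Timestamp
# entries, so test comparisons naturally use pandas.Timestamp values.
assert isinstance(series.index, pandas.DatetimeIndex)
assert series.index[0] == pandas.Timestamp("2016-04-06 22:05:00")
```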