tests: remove warning spew (#197)
Fixes: #196
tseaver committed Jul 30, 2020
1 parent 8fe7254 commit 8360487
Showing 5 changed files with 112 additions and 33 deletions.
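
The change applies one pattern throughout: every test that deliberately triggers a warning now runs the offending call inside `warnings.catch_warnings(record=True)` and asserts on what was recorded, so the warnings are verified instead of spilling into the test runner's output. A minimal sketch of the pattern, with a hypothetical `emit_deprecation()` standing in for the library calls exercised below:

    import warnings

    def emit_deprecation():
        # Hypothetical stand-in for a call that warns (not from this repo).
        warnings.warn("old_flag is deprecated", DeprecationWarning)

    with warnings.catch_warnings(record=True) as warned:
        warnings.simplefilter("always")  # ensure the warning is actually recorded
        emit_deprecation()

    assert len(warned) == 1
    assert "old_flag" in str(warned[0].message)

Inside the block, each warning is appended to `warned` as a `warnings.WarningMessage`; the original warning filters are restored when the block exits.
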
tests/unit/test__pandas_helpers.py — 21 changes: 18 additions & 3 deletions
@@ -20,6 +20,7 @@
 import warnings

 import mock
+import six

 try:
     import pandas
@@ -299,7 +300,10 @@ def test_bq_to_arrow_data_type_w_struct(module_under_test, bq_type):
         )
     )
     assert pyarrow.types.is_struct(actual)
-    assert actual.num_children == len(fields)
+    try:
+        assert actual.num_fields == len(fields)
+    except AttributeError:  # py27
+        assert actual.num_children == len(fields)
     assert actual.equals(expected)


@@ -344,7 +348,10 @@ def test_bq_to_arrow_data_type_w_array_struct(module_under_test, bq_type):
     )
     assert pyarrow.types.is_list(actual)
     assert pyarrow.types.is_struct(actual.value_type)
-    assert actual.value_type.num_children == len(fields)
+    try:
+        assert actual.value_type.num_fields == len(fields)
+    except AttributeError:  # py27
+        assert actual.value_type.num_children == len(fields)
     assert actual.value_type.equals(expected_value_type)


@@ -542,9 +549,17 @@ def test_bq_to_arrow_schema_w_unknown_type(module_under_test):
         # instead.
         schema.SchemaField("field3", "UNKNOWN_TYPE"),
     )
-    actual = module_under_test.bq_to_arrow_schema(fields)
+    with warnings.catch_warnings(record=True) as warned:
+        actual = module_under_test.bq_to_arrow_schema(fields)
     assert actual is None

+    if six.PY3:
+        assert len(warned) == 1
+        warning = warned[0]
+        assert "field3" in str(warning)
+    else:
+        assert len(warned) == 0
+

 @pytest.mark.skipif(pandas is None, reason="Requires `pandas`")
 def test_get_column_or_index_not_found(module_under_test):
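
The try/except added above reflects a pyarrow API rename: recent pyarrow exposes the field count of a struct type as `num_fields`, while the older releases still installable on Python 2.7 only have `num_children`. A standalone sketch of the same shim (assuming a pyarrow version that has at least one of the two attributes):

    import pyarrow

    struct_type = pyarrow.struct(
        [("name", pyarrow.string()), ("age", pyarrow.int64())]
    )

    try:
        field_count = struct_type.num_fields  # current pyarrow
    except AttributeError:  # py27-era pyarrow
        field_count = struct_type.num_children
    assert field_count == 2
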
tests/unit/test_client.py — 25 changes: 17 additions & 8 deletions
@@ -221,7 +221,8 @@ def test__call_api_applying_custom_retry_on_timeout(self):
         from concurrent.futures import TimeoutError
         from google.cloud.bigquery.retry import DEFAULT_RETRY

-        client = self._make_one(project=self.PROJECT)
+        creds = _make_credentials()
+        client = self._make_one(project=self.PROJECT, credentials=creds)

         api_request_patcher = mock.patch.object(
             client._connection, "api_request", side_effect=[TimeoutError, "result"],
@@ -674,7 +675,8 @@ def test_create_bqstorage_client(self):
             mock_client.assert_called_once_with(credentials=creds)

     def test_create_bqstorage_client_missing_dependency(self):
-        client = self._make_one(project=self.PROJECT)
+        creds = _make_credentials()
+        client = self._make_one(project=self.PROJECT, credentials=creds)

         def fail_bqstorage_import(name, globals, locals, fromlist, level):
             # NOTE: *very* simplified, assuming a straightforward absolute import
@@ -7680,17 +7682,24 @@ def test_load_table_from_dataframe_wo_pyarrow_custom_compression(self):
         )

         with load_patch, get_table_patch, pyarrow_patch, to_parquet_patch as to_parquet_spy:
-            client.load_table_from_dataframe(
-                dataframe,
-                self.TABLE_REF,
-                location=self.LOCATION,
-                parquet_compression="gzip",
-            )
+            with warnings.catch_warnings(record=True) as warned:
+                client.load_table_from_dataframe(
+                    dataframe,
+                    self.TABLE_REF,
+                    location=self.LOCATION,
+                    parquet_compression="gzip",
+                )

         call_args = to_parquet_spy.call_args
         assert call_args is not None
         assert call_args.kwargs.get("compression") == "gzip"

+        assert len(warned) == 2
+        warning = warned[0]
+        assert "Loading dataframe data without pyarrow" in str(warning)
+        warning = warned[1]
+        assert "Please install the pyarrow package" in str(warning)
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_load_table_from_dataframe_w_nulls(self):
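
Two ideas recur in this file. First, the `Client` constructor calls now pass explicit mock credentials, presumably so the tests no longer fall back to ambient credential discovery (which can itself warn). Second, the first hunk keeps its `side_effect=[TimeoutError, "result"]`, which is the standard way to script a flaky dependency: a list-valued `side_effect` makes successive calls raise or return each element in turn. A self-contained sketch of that retry setup, with a hypothetical `fetch_with_retry` helper:

    import mock  # `from unittest import mock` on Python 3

    def fetch_with_retry(conn, attempts=2):
        # Hypothetical helper: retry the call once before giving up.
        for attempt in range(attempts):
            try:
                return conn.api_request()
            except TimeoutError:
                if attempt == attempts - 1:
                    raise

    conn = mock.Mock()
    conn.api_request.side_effect = [TimeoutError, "result"]  # fail once, then succeed
    assert fetch_with_retry(conn) == "result"
    assert conn.api_request.call_count == 2
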
tests/unit/test_job.py — 46 changes: 33 additions & 13 deletions
@@ -17,6 +17,7 @@
 import json
 import textwrap
 import unittest
+import warnings

 import freezegun
 import mock
@@ -1834,26 +1835,34 @@ def test_time_partitioning_hit(self):
"expirationMs": str(year_ms),
"requirePartitionFilter": False,
}
expected = TimePartitioning(
type_=TimePartitioningType.DAY,
field=field,
expiration_ms=year_ms,
require_partition_filter=False,
)
with warnings.catch_warnings(record=True) as warned:
expected = TimePartitioning(
type_=TimePartitioningType.DAY,
field=field,
expiration_ms=year_ms,
require_partition_filter=False,
)
self.assertEqual(config.time_partitioning, expected)

assert len(warned) == 1
warning = warned[0]
assert "TimePartitioning.require_partition_filter" in str(warning)

def test_time_partitioning_setter(self):
from google.cloud.bigquery.table import TimePartitioning
from google.cloud.bigquery.table import TimePartitioningType

field = "creation_date"
year_ms = 86400 * 1000 * 365
time_partitioning = TimePartitioning(
type_=TimePartitioningType.DAY,
field=field,
expiration_ms=year_ms,
require_partition_filter=False,
)

with warnings.catch_warnings(record=True) as warned:
time_partitioning = TimePartitioning(
type_=TimePartitioningType.DAY,
field=field,
expiration_ms=year_ms,
require_partition_filter=False,
)

config = self._get_target_class()()
config.time_partitioning = time_partitioning
expected = {
@@ -1864,6 +1873,10 @@ def test_time_partitioning_setter(self):
         }
         self.assertEqual(config._properties["load"]["timePartitioning"], expected)

+        assert len(warned) == 1
+        warning = warned[0]
+        assert "TimePartitioning.require_partition_filter" in str(warning)
+
     def test_time_partitioning_setter_w_none(self):
         from google.cloud.bigquery.table import TimePartitioningType

@@ -5595,7 +5608,10 @@ def test_to_dataframe_column_date_dtypes_wo_pyarrow(self):
         job = self._make_one(self.JOB_ID, self.QUERY, client)

         with mock.patch("google.cloud.bigquery.table.pyarrow", None):
-            df = job.to_dataframe(date_as_object=False, create_bqstorage_client=False)
+            with warnings.catch_warnings(record=True) as warned:
+                df = job.to_dataframe(
+                    date_as_object=False, create_bqstorage_client=False
+                )

         self.assertIsInstance(df, pandas.DataFrame)
         self.assertEqual(len(df), 1)  # verify the number of rows
@@ -5604,6 +5620,10 @@ def test_to_dataframe_column_date_dtypes_wo_pyarrow(self):

         self.assertEqual(df.date.dtype.name, "object")

+        assert len(warned) == 1
+        warning = warned[0]
+        assert "without pyarrow" in str(warning)
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     @unittest.skipIf(tqdm is None, "Requires `tqdm`")
     @mock.patch("tqdm.tqdm")
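
A detail these assertions rely on: `catch_warnings(record=True)` collects `warnings.WarningMessage` objects rather than the warning instances themselves, and `str()` of a `WarningMessage` embeds the message text (along with the category, filename, and line number), which is why `assert "..." in str(warning)` matches. A quick demonstration:

    import warnings

    with warnings.catch_warnings(record=True) as warned:
        warnings.simplefilter("always")
        warnings.warn(
            "require_partition_filter is deprecated", DeprecationWarning
        )

    record = warned[0]
    assert isinstance(record, warnings.WarningMessage)
    assert "require_partition_filter" in str(record)          # formatted record
    assert "require_partition_filter" in str(record.message)  # the warning itself
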
tests/unit/test_magics.py — 6 changes: 3 additions & 3 deletions
@@ -399,7 +399,7 @@ def test_bigquery_magic_without_optional_arguments(monkeypatch):

     # Set up the context with monkeypatch so that it's reset for subsequent
     # tests.
-    monkeypatch.setattr(magics.context, "credentials", mock_credentials)
+    monkeypatch.setattr(magics.context, "_credentials", mock_credentials)

     # Mock out the BigQuery Storage API.
     bqstorage_mock = mock.create_autospec(bigquery_storage_v1.BigQueryReadClient)
@@ -560,7 +560,7 @@ def test_bigquery_magic_with_bqstorage_from_argument(monkeypatch):

     # Set up the context with monkeypatch so that it's reset for subsequent
     # tests.
-    monkeypatch.setattr(magics.context, "credentials", mock_credentials)
+    monkeypatch.setattr(magics.context, "_credentials", mock_credentials)

     # Mock out the BigQuery Storage API.
     bqstorage_mock = mock.create_autospec(bigquery_storage_v1.BigQueryReadClient)
@@ -624,7 +624,7 @@ def test_bigquery_magic_with_rest_client_requested(monkeypatch):

     # Set up the context with monkeypatch so that it's reset for subsequent
     # tests.
-    monkeypatch.setattr(magics.context, "credentials", mock_credentials)
+    monkeypatch.setattr(magics.context, "_credentials", mock_credentials)

     # Mock out the BigQuery Storage API.
     bqstorage_mock = mock.create_autospec(bigquery_storage_v1.BigQueryReadClient)
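
The only change here is seeding the magics context through the private `_credentials` attribute instead of the public `credentials` one. The diff doesn't state the motivation, but a plausible reading is that writing to the private attribute sidesteps whatever the public property does on assignment or access; the tests only need the stored value, and `monkeypatch.setattr` undoes the write afterward either way. A generic sketch with a hypothetical `Context` class (not the real `magics.context`):

    class Context(object):
        # Hypothetical: a property whose getter has side effects.
        _credentials = None

        @property
        def credentials(self):
            if self._credentials is None:
                raise RuntimeError("would trigger real credential discovery")
            return self._credentials

    def test_with_seeded_context(monkeypatch):
        context = Context()
        monkeypatch.setattr(context, "_credentials", "fake-credentials")
        assert context.credentials == "fake-credentials"
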
tests/unit/test_table.py — 47 changes: 41 additions & 6 deletions
@@ -1778,7 +1778,8 @@ def test_to_arrow_w_unknown_type(self):
         api_request = mock.Mock(return_value={"rows": rows})
         row_iterator = self._make_one(_mock_client(), api_request, path, schema)

-        tbl = row_iterator.to_arrow(create_bqstorage_client=False)
+        with warnings.catch_warnings(record=True) as warned:
+            tbl = row_iterator.to_arrow(create_bqstorage_client=False)

         self.assertIsInstance(tbl, pyarrow.Table)
         self.assertEqual(tbl.num_rows, 2)
@@ -1799,6 +1800,10 @@ def test_to_arrow_w_unknown_type(self):
         self.assertEqual(ages, [33, 29])
         self.assertEqual(sports, ["volleyball", "basketball"])

+        self.assertEqual(len(warned), 1)
+        warning = warned[0]
+        self.assertTrue("sport" in str(warning))
+
     @unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
     def test_to_arrow_w_empty_table(self):
         from google.cloud.bigquery.schema import SchemaField
@@ -2370,13 +2375,18 @@ def test_to_dataframe_progress_bar_wo_pyarrow(
         for progress_bar_type, progress_bar_mock in progress_bars:
             row_iterator = self._make_one(_mock_client(), api_request, path, schema)
             with mock.patch("google.cloud.bigquery.table.pyarrow", None):
-                df = row_iterator.to_dataframe(progress_bar_type=progress_bar_type)
+                with warnings.catch_warnings(record=True) as warned:
+                    df = row_iterator.to_dataframe(progress_bar_type=progress_bar_type)

             progress_bar_mock.assert_called()
             progress_bar_mock().update.assert_called()
             progress_bar_mock().close.assert_called_once()
             self.assertEqual(len(df), 4)

+            self.assertEqual(len(warned), 1)
+            warning = warned[0]
+            self.assertTrue("without pyarrow" in str(warning))
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     @mock.patch("google.cloud.bigquery.table.tqdm", new=None)
     def test_to_dataframe_no_tqdm_no_progress_bar(self):
@@ -2499,12 +2509,17 @@ def test_to_dataframe_w_empty_results_wo_pyarrow(self):
         api_request = mock.Mock(return_value={"rows": []})
         row_iterator = self._make_one(_mock_client(), api_request, schema=schema)

-        df = row_iterator.to_dataframe()
+        with warnings.catch_warnings(record=True) as warned:
+            df = row_iterator.to_dataframe()

         self.assertIsInstance(df, pandas.DataFrame)
         self.assertEqual(len(df), 0)  # verify the number of rows
         self.assertEqual(list(df), ["name", "age"])  # verify the column names

+        self.assertEqual(len(warned), 1)
+        warning = warned[0]
+        self.assertTrue("without pyarrow" in str(warning))
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     def test_to_dataframe_w_no_results_wo_pyarrow(self):
         from google.cloud.bigquery.schema import SchemaField
@@ -2522,12 +2537,17 @@ def empty_iterable(dtypes=None):

         row_iterator.to_dataframe_iterable = empty_iterable

-        df = row_iterator.to_dataframe()
+        with warnings.catch_warnings(record=True) as warned:
+            df = row_iterator.to_dataframe()

         self.assertIsInstance(df, pandas.DataFrame)
         self.assertEqual(len(df), 0)  # verify the number of rows
         self.assertEqual(list(df), ["name", "age"])  # verify the column names

+        self.assertEqual(len(warned), 1)
+        warning = warned[0]
+        self.assertTrue("without pyarrow" in str(warning))
+
     @unittest.skipIf(pandas is None, "Requires `pandas`")
     def test_to_dataframe_w_various_types_nullable(self):
         import datetime
@@ -2787,11 +2807,19 @@ def test_to_dataframe_w_bqstorage_v1beta1_no_streams(self):
             table=mut.TableReference.from_string("proj.dset.tbl"),
         )

-        got = row_iterator.to_dataframe(bqstorage_client)
+        with warnings.catch_warnings(record=True) as warned:
+            got = row_iterator.to_dataframe(bqstorage_client)

         column_names = ["colA", "colC", "colB"]
         self.assertEqual(list(got), column_names)
         self.assertTrue(got.empty)

+        self.assertEqual(len(warned), 1)
+        warning = warned[0]
+        self.assertTrue(
+            "Support for BigQuery Storage v1beta1 clients is deprecated" in str(warning)
+        )
+
     @unittest.skipIf(
         bigquery_storage_v1 is None, "Requires `google-cloud-bigquery-storage`"
     )
@@ -3493,7 +3521,10 @@ def test_to_dataframe_concat_categorical_dtype_wo_pyarrow(self):

         row_iterator = self._make_one(_mock_client(), api_request, path, schema)

-        with mock.patch("google.cloud.bigquery.table.pyarrow", None):
+        mock_pyarrow = mock.patch("google.cloud.bigquery.table.pyarrow", None)
+        catch_warnings = warnings.catch_warnings(record=True)
+
+        with mock_pyarrow, catch_warnings as warned:
             got = row_iterator.to_dataframe(
                 dtypes={
                     "col_category": pandas.core.dtypes.dtypes.CategoricalDtype(
@@ -3522,6 +3553,10 @@ def test_to_dataframe_concat_categorical_dtype_wo_pyarrow(self):
["low", "medium", "low", "medium", "high", "low"],
)

self.assertEqual(len(warned), 1)
warning = warned[0]
self.assertTrue("without pyarrow" in str(warning))


class TestPartitionRange(unittest.TestCase):
def _get_target_class(self):
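
The last hunk also shows a small structural trick: the two context managers are bound to local names and then stacked in a single comma-separated `with` statement, which keeps each line short and works on Python 2.7, which this repo still supported. In isolation, under an illustrative patch target:

    import warnings

    import mock  # `from unittest import mock` on Python 3

    patch_pi = mock.patch("math.pi", 3)  # illustrative target, not from the repo
    catch_warnings = warnings.catch_warnings(record=True)

    # Equivalent to nesting the two `with` blocks; only catch_warnings needs a target.
    with patch_pi, catch_warnings as warned:
        warnings.simplefilter("always")
        warnings.warn("running in degraded mode", UserWarning)

    assert len(warned) == 1
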
