Skip to content

Commit

Permalink
Use ARROW as data format in DB API cursor
Browse files · Browse the repository at this point in the history
  • Loading branch information
plamut committed May 13, 2020
1 parent 530a5db commit 5b82c76
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
2 changes: 1 addition & 1 deletion google/cloud/bigquery/dbapi/cursor.py
@@ -296,7 +296,7 @@ def _bqstorage_fetch(self, bqstorage_client):
else:
requested_session = bigquery_storage_v1.types.ReadSession(
table=table_reference.to_bqstorage(),
data_format=bigquery_storage_v1.enums.DataFormat.AVRO,
data_format=bigquery_storage_v1.enums.DataFormat.ARROW,
)
read_session = bqstorage_client.create_read_session(
parent="projects/{}".format(table_reference.project),
Expand Down
4 changes: 2 additions & 2 deletions tests/system.py
@@ -1667,7 +1667,7 @@ def test_dbapi_fetch_w_bqstorage_client_small_result_set(self):
@unittest.skipIf(
bigquery_storage_v1 is None, "Requires `google-cloud-bigquery-storage`"
)
@unittest.skipIf(fastavro is None, "Requires `fastavro`")
@unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
def test_dbapi_fetch_w_bqstorage_client_large_result_set(self):
bqstorage_client = bigquery_storage_v1.BigQueryReadClient(
credentials=Config.CLIENT._credentials
@@ -1717,7 +1717,7 @@ def test_dbapi_fetch_w_bqstorage_client_large_result_set(self):
@unittest.skipIf(
bigquery_storage_v1beta1 is None, "Requires `google-cloud-bigquery-storage`"
)
@unittest.skipIf(fastavro is None, "Requires `fastavro`")
@unittest.skipIf(pyarrow is None, "Requires `pyarrow`")
def test_dbapi_fetch_w_bqstorage_client_v1beta1_large_result_set(self):
bqstorage_client = bigquery_storage_v1beta1.BigQueryStorageClient(
credentials=Config.CLIENT._credentials
Expand Down

0 comments on commit 5b82c76

Please sign in to comment.