diff --git a/google/cloud/bigquery/dbapi/cursor.py b/google/cloud/bigquery/dbapi/cursor.py
index 597313fd6..74f8aec4e 100644
--- a/google/cloud/bigquery/dbapi/cursor.py
+++ b/google/cloud/bigquery/dbapi/cursor.py
@@ -229,7 +229,6 @@ def _try_fetch(self, size=None):
             return
 
         if self._query_data is None:
-            client = self.connection._client
             bqstorage_client = self.connection._bqstorage_client
 
             if bqstorage_client is not None:
@@ -237,11 +236,7 @@ def _try_fetch(self, size=None):
                 self._query_data = _helpers.to_bq_table_rows(rows_iterable)
                 return
 
-            rows_iter = client.list_rows(
-                self._query_job.destination,
-                selected_fields=self._query_job._query_results.schema,
-                page_size=self.arraysize,
-            )
+            rows_iter = self._query_job.result(page_size=self.arraysize)
             self._query_data = iter(rows_iter)
 
     def _bqstorage_fetch(self, bqstorage_client):
diff --git a/tests/unit/test_dbapi_cursor.py b/tests/unit/test_dbapi_cursor.py
index 5c3bfcae9..f55b3fd3f 100644
--- a/tests/unit/test_dbapi_cursor.py
+++ b/tests/unit/test_dbapi_cursor.py
@@ -66,8 +66,8 @@ def _mock_client(
             num_dml_affected_rows=num_dml_affected_rows,
             dry_run=dry_run_job,
             total_bytes_processed=total_bytes_processed,
+            rows=rows,
         )
-        mock_client.list_rows.return_value = rows
         mock_client._default_query_job_config = default_query_job_config
 
         # Assure that the REST client gets used, not the BQ Storage client.
@@ -102,9 +102,13 @@ def _mock_job(
         num_dml_affected_rows=None,
         dry_run=False,
         total_bytes_processed=0,
+        rows=None,
     ):
         from google.cloud.bigquery import job
 
+        if rows is None:
+            rows = []
+
         mock_job = mock.create_autospec(job.QueryJob)
         mock_job.error_result = None
         mock_job.state = "DONE"
@@ -114,7 +118,7 @@ def _mock_job(
             mock_job.result.side_effect = exceptions.NotFound
             mock_job.total_bytes_processed = total_bytes_processed
         else:
-            mock_job.result.return_value = mock_job
+            mock_job.result.return_value = rows
             mock_job._query_results = self._mock_results(
                 total_rows=total_rows,
                 schema=schema,
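
For context, after this change the DB-API cursor fetches rows via QueryJob.result(page_size=self.arraysize) instead of calling Client.list_rows() on the job's destination table. A minimal sketch of how that fetch path is exercised through the public DB-API surface follows; the sample query and public table are illustrative only and are not part of this patch.

    from google.cloud import bigquery
    from google.cloud.bigquery import dbapi

    # Illustrative only: drives Cursor._try_fetch() through the public DB-API.
    client = bigquery.Client()
    connection = dbapi.connect(client)
    cursor = connection.cursor()

    # On the REST fetch path, arraysize is forwarded as the page_size
    # passed to QueryJob.result() after this change.
    cursor.arraysize = 100
    cursor.execute(
        "SELECT name FROM `bigquery-public-data.usa_names.usa_1910_2013` LIMIT 10"
    )
    rows = cursor.fetchall()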