fix: pass timeout to 'PartialRowsData.response_iterator' (#16)
mf2199 committed Sep 2, 2020
1 parent 92386ec commit 8f76434
Showing 3 changed files with 19 additions and 3 deletions.
9 changes: 8 additions & 1 deletion google/cloud/bigtable/row_data.py
@@ -404,7 +404,14 @@ def __init__(self, read_method, request, retry=DEFAULT_RETRY_READ_ROWS):
self.read_method = read_method
self.request = request
self.retry = retry
self.response_iterator = read_method(request)

# The `timeout` parameter must be somewhat greater than the deadline
# contained in `self.retry`, in order to avoid a race-like condition: the
# first deadline error must be registered before the retry is invoked,
# otherwise there is a risk of entering an infinite loop that resets the
# timeout counter just before it is triggered. The 1-second increment
# used here is customary and should not be made much smaller.
self.response_iterator = read_method(request, timeout=self.retry._deadline + 1)

self.rows = {}
self._state = self.STATE_NEW_ROW
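The comment above captures the key constraint: the gRPC stream's timeout must outlive the retry deadline so that the deadline error actually surfaces and is handled by the retry wrapper. The following standalone sketch is not part of this commit; the Retry construction and deadline access are assumptions based on google.api_core, used only to illustrate the relationship.

from google.api_core import retry as retries

# Hypothetical retry, similar in spirit to DEFAULT_RETRY_READ_ROWS:
# give up after a 60-second overall deadline.
example_retry = retries.Retry(deadline=60.0)

# The stream timeout must be slightly larger than the retry deadline,
# otherwise the stream could be re-opened just before the deadline error
# is ever raised, restarting the timer indefinitely.
stream_timeout = example_retry.deadline + 1  # 61.0 seconds
print(stream_timeout)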
9 changes: 8 additions & 1 deletion tests/unit/test_row_data.py
@@ -385,10 +385,16 @@ def test_constructor(self):
self.assertEqual(partial_rows_data.retry, DEFAULT_RETRY_READ_ROWS)

def test_constructor_with_retry(self):
from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS

client = _Client()
client._data_stub = mock.MagicMock()
request = retry = object()
request = object()
retry = DEFAULT_RETRY_READ_ROWS
partial_rows_data = self._make_one(client._data_stub.ReadRows, request, retry)
partial_rows_data.read_method.assert_called_once_with(
request, timeout=DEFAULT_RETRY_READ_ROWS.deadline + 1
)
self.assertIs(partial_rows_data.request, request)
self.assertEqual(partial_rows_data.rows, {})
self.assertEqual(partial_rows_data.retry, retry)
@@ -471,6 +477,7 @@ def test_state_new_row_w_row(self):
request = object()

yrd = self._make_one(client._table_data_client.transport.read_rows, request)
self.assertEqual(yrd.retry._deadline, 60.0)

yrd._response_iterator = iterator
rows = [row for row in yrd]
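The assertion added in test_constructor_with_retry can also be exercised outside the test class. A minimal, self-contained sketch (an assumed usage, not code from this repository's test harness) that constructs PartialRowsData with a mocked read method and checks the forwarded timeout:

from unittest import mock

from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS, PartialRowsData

read_method = mock.MagicMock(name="ReadRows")
request = object()

# The constructor opens the stream immediately, forwarding a timeout that
# is one second past the retry deadline (60 seconds by default at this commit).
prd = PartialRowsData(read_method, request, retry=DEFAULT_RETRY_READ_ROWS)

read_method.assert_called_once_with(
    request, timeout=DEFAULT_RETRY_READ_ROWS.deadline + 1
)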
4 changes: 3 additions & 1 deletion tests/unit/test_table.py
@@ -654,6 +654,7 @@ def test_read_rows(self):
from google.cloud.bigtable import table as MUT
from google.cloud.bigtable_v2.gapic import bigtable_client
from google.cloud.bigtable_admin_v2.gapic import bigtable_table_admin_client
from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS

data_api = bigtable_client.BigtableClient(mock.Mock())
table_api = mock.create_autospec(
@@ -670,7 +671,8 @@ def test_read_rows(self):
table = self._make_one(self.TABLE_ID, instance, app_profile_id=app_profile_id)

# Create request_pb
request = retry = object() # Returned by our mock.
request = object() # Returned by our mock.
retry = DEFAULT_RETRY_READ_ROWS
mock_created = []

def mock_create_row_request(table_name, **kwargs):
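At the call-site level, the retry object now controls both the retry window and, after this change, the stream timeout. A hedged usage sketch follows, assuming a project/instance/table setup that is not part of this commit, showing how a caller might widen the deadline through the retry parameter of Table.read_rows:

from google.cloud import bigtable
from google.cloud.bigtable.row_data import DEFAULT_RETRY_READ_ROWS

client = bigtable.Client(project="my-project")  # hypothetical project
table = client.instance("my-instance").table("my-table")  # hypothetical names

# Retry reads for up to 5 minutes; with this change, the underlying stream
# is opened with a timeout of deadline + 1 = 301 seconds.
longer_retry = DEFAULT_RETRY_READ_ROWS.with_deadline(300.0)

for row in table.read_rows(retry=longer_retry):
    print(row.row_key)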
