chore: restore coverage (almost) to 100% (#225)

Note that the synthtool-generated `.coveragerc` (see #224) does *not* include all changes needed for 100% coverage; see:

- googleapis/gapic-generator-python#171
- googleapis/gapic-generator-python#437

Closes #92.
Closes #195.
tseaver committed Oct 23, 2020
1 parent db5f286 commit e8f6c4d
Showing 14 changed files with 37 additions and 212 deletions.
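For context, the coverage gate this commit restores is enforced through the repository's `.coveragerc`. A minimal sketch of the kind of settings involved — illustrative only, not the actual synthtool-generated file tracked in #224:

    # Illustrative .coveragerc sketch; the real file is synthtool-generated.
    [run]
    branch = True

    [report]
    # Fail the coverage report (and thus the build) below 100% total coverage.
    fail_under = 100
    show_missing = True
    exclude_lines =
        # Escape hatch for lines that genuinely cannot be exercised.
        pragma: NO COVER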
21 changes: 2 additions & 19 deletions google/cloud/firestore_v1/_helpers.py
@@ -644,20 +644,6 @@ def __init__(self, document_data) -> None:
         self.transform_merge = []
         self.merge = []
 
-    @property
-    def has_updates(self):
-        # for whatever reason, the conformance tests want to see the parent
-        # of nested transform paths in the update mask
-        # (see set-st-merge-nonleaf-alone.textproto)
-        update_paths = set(self.data_merge)
-
-        for transform_path in self.transform_paths:
-            if len(transform_path.parts) > 1:
-                parent_fp = FieldPath(*transform_path.parts[:-1])
-                update_paths.add(parent_fp)
-
-        return bool(update_paths)
-
     def _apply_merge_all(self) -> None:
         self.data_merge = sorted(self.field_paths + self.deleted_fields)
         # TODO: other transforms
@@ -771,8 +757,7 @@ def _get_update_mask(
             if field_path not in self.transform_merge
         ]
 
-        if mask_paths or allow_empty_mask:
-            return common.DocumentMask(field_paths=mask_paths)
+        return common.DocumentMask(field_paths=mask_paths)
 
 
 def pbs_for_set_with_merge(
@@ -794,10 +779,8 @@ def pbs_for_set_with_merge(
     extractor = DocumentExtractorForMerge(document_data)
     extractor.apply_merge(merge)
 
-    merge_empty = not document_data
-    allow_empty_mask = merge_empty or extractor.transform_paths
+    set_pb = extractor.get_update_pb(document_path)
 
-    set_pb = extractor.get_update_pb(document_path, allow_empty_mask=allow_empty_mask)
     if extractor.transform_paths:
         field_transform_pbs = extractor.get_field_transform_pbs(document_path)
         set_pb.update_transforms.extend(field_transform_pbs)
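With the change above, `_get_update_mask` returns a `DocumentMask` unconditionally, which is what lets `pbs_for_set_with_merge` drop its `merge_empty` / `allow_empty_mask` bookkeeping. A hedged sketch of why an always-present mask is safe: in the Firestore write protocol, an *empty* mask ("update no fields") behaves differently from an *absent* one ("replace the whole document"), so attaching the mask even when `mask_paths` is empty preserves merge semantics:

    # Sketch only; reuses the proto types the diff itself touches.
    from google.cloud.firestore_v1.types import common, write

    empty_mask = common.DocumentMask(field_paths=[])   # merge: touch no data fields
    merge_write = write.Write(update_mask=empty_mask)  # other fields are preserved
    overwrite = write.Write()                          # no mask: whole-document replace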
20 changes: 2 additions & 18 deletions google/cloud/firestore_v1/async_client.py
@@ -284,24 +284,8 @@ async def collections(
             request=request, metadata=self._rpc_metadata, **kwargs,
         )
 
-        while True:
-            for i in iterator.collection_ids:
-                yield self.collection(i)
-            if iterator.next_page_token:
-                next_request = request.copy()
-                next_request["page_token"] = iterator.next_page_token
-                iterator = await self._firestore_api.list_collection_ids(
-                    request=next_request, metadata=self._rpc_metadata, **kwargs,
-                )
-            else:
-                return
-
-        # TODO(microgen): currently this method is rewritten to iterate/page itself.
-        # https://github.com/googleapis/gapic-generator-python/issues/516
-        # it seems the generator ought to be able to do this itself.
-        # iterator.client = self
-        # iterator.item_to_value = _item_to_collection_ref
-        # return iterator
+        async for collection_id in iterator:
+            yield self.collection(collection_id)
 
     def batch(self) -> AsyncWriteBatch:
         """Get a batch instance from this client.
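The rewrite above (mirrored in `async_document.py`, `client.py`, and `document.py` below) leans on the generated `list_collection_ids` pager: iterating it transparently fetches follow-up pages, so the hand-rolled `next_page_token` loop and its TODO can go. A rough sketch of what such an async pager does internally — `fetch_page` and `AsyncPagerSketch` are illustrative stand-ins, not the generated surface:

    import asyncio

    class AsyncPagerSketch:
        """Flattens token-linked pages into one async stream of items."""

        def __init__(self, fetch_page):
            # fetch_page: coroutine mapping a page token to (items, next_token).
            self._fetch_page = fetch_page

        async def __aiter__(self):
            token = None
            while True:
                items, token = await self._fetch_page(token)
                for item in items:
                    yield item
                if not token:
                    return

    async def demo():
        # Two fake pages linked by a token, as a real API would return them.
        pages = {None: (["users"], "t1"), "t1": (["projects"], None)}

        async def fetch_page(token):
            return pages[token]

        assert [i async for i in AsyncPagerSketch(fetch_page)] == ["users", "projects"]

    asyncio.run(demo())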
19 changes: 2 additions & 17 deletions google/cloud/firestore_v1/async_document.py
@@ -407,20 +407,5 @@ async def collections(
             request=request, metadata=self._client._rpc_metadata, **kwargs,
         )
 
-        while True:
-            for i in iterator.collection_ids:
-                yield self.collection(i)
-            if iterator.next_page_token:
-                next_request = request.copy()
-                next_request["page_token"] = iterator.next_page_token
-                iterator = await self._client._firestore_api.list_collection_ids(
-                    request=request, metadata=self._client._rpc_metadata, **kwargs
-                )
-            else:
-                return
-
-        # TODO(microgen): currently this method is rewritten to iterate/page itself.
-        # it seems the generator ought to be able to do this itself.
-        # iterator.document = self
-        # iterator.item_to_value = _item_to_collection_ref
-        # return iterator
+        async for collection_id in iterator:
+            yield self.collection(collection_id)
11 changes: 0 additions & 11 deletions google/cloud/firestore_v1/base_client.py
@@ -536,17 +536,6 @@ def _get_doc_mask(field_paths: Iterable[str]) -> Optional[types.common.DocumentMask]:
     return types.DocumentMask(field_paths=field_paths)
 
 
-def _item_to_collection_ref(iterator, item: str) -> Any:
-    """Convert collection ID to collection ref.
-
-    Args:
-        iterator (google.api_core.page_iterator.GRPCIterator):
-            iterator response
-        item (str): ID of the collection
-    """
-    return iterator.client.collection(item)
-
-
 def _path_helper(path: tuple) -> Any:
     """Standardize path into a tuple of path segments.
11 changes: 0 additions & 11 deletions google/cloud/firestore_v1/base_document.py
@@ -567,14 +567,3 @@ def _first_write_result(write_results: list) -> Any:
         raise ValueError("Expected at least one write result")
 
     return write_results[0]
-
-
-def _item_to_collection_ref(iterator, item: str) -> Any:
-    """Convert collection ID to collection ref.
-
-    Args:
-        iterator (google.api_core.page_iterator.GRPCIterator):
-            iterator response
-        item (str): ID of the collection
-    """
-    return iterator.document.collection(item)
20 changes: 2 additions & 18 deletions google/cloud/firestore_v1/client.py
@@ -280,24 +280,8 @@ def collections(
             request=request, metadata=self._rpc_metadata, **kwargs,
         )
 
-        while True:
-            for i in iterator.collection_ids:
-                yield self.collection(i)
-            if iterator.next_page_token:
-                next_request = request.copy()
-                next_request["page_token"] = iterator.next_page_token
-                iterator = self._firestore_api.list_collection_ids(
-                    request=next_request, metadata=self._rpc_metadata, **kwargs,
-                )
-            else:
-                return
-
-        # TODO(microgen): currently this method is rewritten to iterate/page itself.
-        # https://github.com/googleapis/gapic-generator-python/issues/516
-        # it seems the generator ought to be able to do this itself.
-        # iterator.client = self
-        # iterator.item_to_value = _item_to_collection_ref
-        # return iterator
+        for collection_id in iterator:
+            yield self.collection(collection_id)
 
     def batch(self) -> WriteBatch:
         """Get a batch instance from this client.
19 changes: 2 additions & 17 deletions google/cloud/firestore_v1/document.py
@@ -408,23 +408,8 @@ def collections(
             request=request, metadata=self._client._rpc_metadata, **kwargs,
         )
 
-        while True:
-            for i in iterator.collection_ids:
-                yield self.collection(i)
-            if iterator.next_page_token:
-                next_request = request.copy()
-                next_request["page_token"] = iterator.next_page_token
-                iterator = self._client._firestore_api.list_collection_ids(
-                    request=request, metadata=self._client._rpc_metadata, **kwargs
-                )
-            else:
-                return
-
-        # TODO(microgen): currently this method is rewritten to iterate/page itself.
-        # it seems the generator ought to be able to do this itself.
-        # iterator.document = self
-        # iterator.item_to_value = _item_to_collection_ref
-        # return iterator
+        for collection_id in iterator:
+            yield self.collection(collection_id)
 
     def on_snapshot(self, callback: Callable) -> Watch:
         """Watch this document.
8 changes: 0 additions & 8 deletions tests/unit/v1/test__helpers.py
@@ -1728,7 +1728,6 @@ def test_apply_merge_all_w_empty_document(self):
         self.assertEqual(inst.data_merge, [])
         self.assertEqual(inst.transform_merge, [])
         self.assertEqual(inst.merge, [])
-        self.assertFalse(inst.has_updates)
 
     def test_apply_merge_all_w_delete(self):
         from google.cloud.firestore_v1.transforms import DELETE_FIELD
@@ -1745,7 +1744,6 @@ def test_apply_merge_all_w_delete(self):
         self.assertEqual(inst.data_merge, expected_data_merge)
         self.assertEqual(inst.transform_merge, [])
         self.assertEqual(inst.merge, expected_data_merge)
-        self.assertTrue(inst.has_updates)
 
     def test_apply_merge_all_w_server_timestamp(self):
         from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
@@ -1761,7 +1759,6 @@ def test_apply_merge_all_w_server_timestamp(self):
         self.assertEqual(inst.data_merge, expected_data_merge)
         self.assertEqual(inst.transform_merge, expected_transform_merge)
         self.assertEqual(inst.merge, expected_merge)
-        self.assertTrue(inst.has_updates)
 
     def test_apply_merge_list_fields_w_empty_document(self):
         document_data = {}
@@ -1800,7 +1797,6 @@ def test_apply_merge_list_fields_w_delete(self):
         expected_deleted_fields = [_make_field_path("delete_me")]
         self.assertEqual(inst.set_fields, expected_set_fields)
         self.assertEqual(inst.deleted_fields, expected_deleted_fields)
-        self.assertTrue(inst.has_updates)
 
     def test_apply_merge_list_fields_w_prefixes(self):
 
@@ -1827,7 +1823,6 @@ def test_apply_merge_list_fields_w_non_merge_field(self):

         expected_set_fields = {"write_me": "value"}
         self.assertEqual(inst.set_fields, expected_set_fields)
-        self.assertTrue(inst.has_updates)
 
     def test_apply_merge_list_fields_w_server_timestamp(self):
         from google.cloud.firestore_v1.transforms import SERVER_TIMESTAMP
@@ -1849,7 +1844,6 @@ def test_apply_merge_list_fields_w_server_timestamp(self):
         self.assertEqual(inst.merge, expected_merge)
         expected_server_timestamps = [_make_field_path("timestamp")]
         self.assertEqual(inst.server_timestamps, expected_server_timestamps)
-        self.assertTrue(inst.has_updates)
 
     def test_apply_merge_list_fields_w_array_remove(self):
         from google.cloud.firestore_v1.transforms import ArrayRemove
@@ -1872,7 +1866,6 @@ def test_apply_merge_list_fields_w_array_remove(self):
         self.assertEqual(inst.merge, expected_merge)
         expected_array_removes = {_make_field_path("remove_me"): values}
         self.assertEqual(inst.array_removes, expected_array_removes)
-        self.assertTrue(inst.has_updates)
 
     def test_apply_merge_list_fields_w_array_union(self):
         from google.cloud.firestore_v1.transforms import ArrayUnion
@@ -1895,7 +1888,6 @@ def test_apply_merge_list_fields_w_array_union(self):
         self.assertEqual(inst.merge, expected_merge)
         expected_array_unions = {_make_field_path("union_me"): values}
         self.assertEqual(inst.array_unions, expected_array_unions)
-        self.assertTrue(inst.has_updates)
 
 
 class Test_pbs_for_set_with_merge(unittest.TestCase):
28 changes: 9 additions & 19 deletions tests/unit/v1/test_async_client.py
@@ -196,33 +196,23 @@ def test_document_factory_w_nested_path(self):
         self.assertIsInstance(document2, AsyncDocumentReference)
 
     async def _collections_helper(self, retry=None, timeout=None):
-        from google.api_core.page_iterator import Iterator
-        from google.api_core.page_iterator import Page
         from google.cloud.firestore_v1.async_collection import AsyncCollectionReference
         from google.cloud.firestore_v1 import _helpers
 
         collection_ids = ["users", "projects"]
-        client = self._make_default_one()
-        firestore_api = AsyncMock()
-        firestore_api.mock_add_spec(spec=["list_collection_ids"])
-        client._firestore_api_internal = firestore_api
 
-        # TODO(microgen): list_collection_ids isn't a pager.
-        # https://github.com/googleapis/gapic-generator-python/issues/516
-        class _Iterator(Iterator):
-            def __init__(self, pages):
-                super(_Iterator, self).__init__(client=None)
-                self._pages = pages
-                self.collection_ids = pages[0]
+        class Pager(object):
+            async def __aiter__(self, **_):
+                for collection_id in collection_ids:
+                    yield collection_id
 
-            def _next_page(self):
-                if self._pages:
-                    page, self._pages = self._pages[0], self._pages[1:]
-                    return Page(self, page, self.item_to_value)
+        firestore_api = AsyncMock()
+        firestore_api.mock_add_spec(spec=["list_collection_ids"])
+        firestore_api.list_collection_ids.return_value = Pager()
 
+        client = self._make_default_one()
+        client._firestore_api_internal = firestore_api
         kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
-        iterator = _Iterator(pages=[collection_ids])
-        firestore_api.list_collection_ids.return_value = iterator
 
         collections = [c async for c in client.collections(**kwargs)]
 
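A note on the `Pager` stub above: since the rewritten `collections()` only ever does `async for ... in iterator`, the mocked return value needs nothing but `__aiter__` — no `page_iterator.Iterator` base class, no pages, no token bookkeeping. A self-contained sanity check of the pattern (standalone sketch, not part of the test suite):

    import asyncio

    class Pager:
        async def __aiter__(self, **_):
            for collection_id in ["users", "projects"]:
                yield collection_id

    async def check():
        # An async comprehension drives __aiter__ exactly like collections() does.
        assert [c async for c in Pager()] == ["users", "projects"]

    asyncio.run(check())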
21 changes: 6 additions & 15 deletions tests/unit/v1/test_async_document.py
@@ -497,27 +497,18 @@ async def test_get_with_transaction(self):
     @pytest.mark.asyncio
     async def _collections_helper(self, page_size=None, retry=None, timeout=None):
         from google.cloud.firestore_v1 import _helpers
-        from google.api_core.page_iterator import Iterator
-        from google.api_core.page_iterator import Page
         from google.cloud.firestore_v1.async_collection import AsyncCollectionReference
 
-        # TODO(microgen): https://github.com/googleapis/gapic-generator-python/issues/516
-        class _Iterator(Iterator):
-            def __init__(self, pages):
-                super(_Iterator, self).__init__(client=None)
-                self._pages = pages
-                self.collection_ids = pages[0]
+        collection_ids = ["coll-1", "coll-2"]
 
-            def _next_page(self):
-                if self._pages:
-                    page, self._pages = self._pages[0], self._pages[1:]
-                    return Page(self, page, self.item_to_value)
+        class Pager(object):
+            async def __aiter__(self, **_):
+                for collection_id in collection_ids:
+                    yield collection_id
 
-        collection_ids = ["coll-1", "coll-2"]
-        iterator = _Iterator(pages=[collection_ids])
         firestore_api = AsyncMock()
         firestore_api.mock_add_spec(spec=["list_collection_ids"])
-        firestore_api.list_collection_ids.return_value = iterator
+        firestore_api.list_collection_ids.return_value = Pager()
 
         client = _make_client()
         client._firestore_api_internal = firestore_api
10 changes: 0 additions & 10 deletions tests/unit/v1/test_async_query.py
@@ -25,16 +25,6 @@
 )
 
 
-class MockAsyncIter:
-    def __init__(self, count=3):
-        # count is arbitrary value
-        self.count = count
-
-    async def __aiter__(self, **_):
-        for i in range(self.count):
-            yield i
-
-
 class TestAsyncQuery(aiounittest.AsyncTestCase):
     @staticmethod
     def _get_target_class():
39 changes: 7 additions & 32 deletions tests/unit/v1/test_client.py
@@ -195,31 +195,20 @@ def test_document_factory_w_nested_path(self):

     def _collections_helper(self, retry=None, timeout=None):
         from google.cloud.firestore_v1 import _helpers
-        from google.api_core.page_iterator import Iterator
-        from google.api_core.page_iterator import Page
         from google.cloud.firestore_v1.collection import CollectionReference
 
         collection_ids = ["users", "projects"]
-        client = self._make_default_one()
-        firestore_api = mock.Mock(spec=["list_collection_ids"])
-        client._firestore_api_internal = firestore_api
 
-        # TODO(microgen): list_collection_ids isn't a pager.
-        # https://github.com/googleapis/gapic-generator-python/issues/516
-        class _Iterator(Iterator):
-            def __init__(self, pages):
-                super(_Iterator, self).__init__(client=None)
-                self._pages = pages
-                self.collection_ids = pages[0]
+        class Pager(object):
+            def __iter__(self):
+                yield from collection_ids
 
-            def _next_page(self):
-                if self._pages:
-                    page, self._pages = self._pages[0], self._pages[1:]
-                    return Page(self, page, self.item_to_value)
+        firestore_api = mock.Mock(spec=["list_collection_ids"])
+        firestore_api.list_collection_ids.return_value = Pager()
 
+        client = self._make_default_one()
+        client._firestore_api_internal = firestore_api
         kwargs = _helpers.make_retry_timeout_kwargs(retry, timeout)
-        iterator = _Iterator(pages=[collection_ids])
-        firestore_api.list_collection_ids.return_value = iterator
 
         collections = list(client.collections(**kwargs))
 
@@ -259,20 +248,6 @@ def _invoke_get_all(self, client, references, document_pbs, **kwargs):

         return list(snapshots)
 
-    def _info_for_get_all(self, data1, data2):
-        client = self._make_default_one()
-        document1 = client.document("pineapple", "lamp1")
-        document2 = client.document("pineapple", "lamp2")
-
-        # Make response protobufs.
-        document_pb1, read_time = _doc_get_info(document1._document_path, data1)
-        response1 = _make_batch_response(found=document_pb1, read_time=read_time)
-
-        document, read_time = _doc_get_info(document2._document_path, data2)
-        response2 = _make_batch_response(found=document, read_time=read_time)
-
-        return client, document1, document2, response1, response2
-
     def _get_all_helper(self, num_snapshots=2, txn_id=None, retry=None, timeout=None):
         from google.cloud.firestore_v1 import _helpers
         from google.cloud.firestore_v1.types import common
