Some generator fixes, test fixes
crwilcox committed Jul 9, 2020
1 parent 0243d8d commit d5c3e44
Showing 13 changed files with 57 additions and 30 deletions.
33 changes: 27 additions & 6 deletions google/cloud/firestore_v1/client.py
@@ -136,7 +136,7 @@ def _firestore_api(self):
# We need this in order to set appropriate keepalive options.

if self._emulator_host is not None:
# TODO(crwilcox): this likely needs to be adapted to use insecure_channel
# TODO(microgen): this likely needs to be adapted to use insecure_channel
# on new generated surface.
channel = firestore_grpc_transport.FirestoreGrpcTransport.create_channel(
host=self._emulator_host
@@ -453,20 +453,41 @@ def get_all(self, references, field_paths=None, transaction=None):
for get_doc_response in response_iterator:
yield _parse_batch_get(get_doc_response, reference_map, self)

def collections(self):
def collections(self,):
"""List top-level collections of the client's database.
Returns:
Sequence[:class:`~google.cloud.firestore_v1.collection.CollectionReference`]:
iterator of subcollections of the current document.
"""
iterator = self._firestore_api.list_collection_ids(
request={"parent": "{}/documents".format(self._database_string)},
request={
"parent": "{}/documents".format(self._database_string),
"page_size": 2,
},
metadata=self._rpc_metadata,
)
iterator.client = self
iterator.item_to_value = _item_to_collection_ref
return iterator

while True:
for i in iterator.collection_ids:
yield self.collection(i)
if iterator.next_page_token:
iterator = self._firestore_api.list_collection_ids(
request={
"parent": "{}/documents".format(self._database_string),
"page_size": 2,
"page_token": iterator.next_page_token,
},
metadata=self._rpc_metadata,
)
else:
return

# TODO(microgen): currently this method is rewritten to iterate/page itself.
# it seems the generator ought to be able to do this itself.
# iterator.client = self
# iterator.item_to_value = _item_to_collection_ref
# return iterator

def batch(self):
"""Get a batch instance from this client.
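For context on the TODO(microgen) comment in the collections() hunk above: the hand-rolled while loop is a workaround for the generated list_collection_ids surface not paging transparently yet. The sketch below shows roughly what the method could collapse to if the generated client returned a directly iterable, self-paging pager (the usual GAPIC microgenerator behavior); that pager behavior is an assumption here, not something this commit provides.

def collections(self):
    """List top-level collections of the client's database."""
    # Assumes list_collection_ids returns a pager that resolves page tokens
    # itself; this is exactly what the TODO above hopes the generator will do.
    pager = self._firestore_api.list_collection_ids(
        request={"parent": "{}/documents".format(self._database_string)},
        metadata=self._rpc_metadata,
    )
    for collection_id in pager:  # follow-up pages are fetched lazily
        yield self.collection(collection_id)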
@@ -1055,7 +1055,7 @@ async def create_document(

try:
_client_info = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google.cloud.firestore",).version,
gapic_version=pkg_resources.get_distribution("google-firestore",).version,
)
except pkg_resources.DistributionNotFound:
_client_info = gapic_v1.client_info.ClientInfo()
2 changes: 1 addition & 1 deletion google/cloud/firestore_v1/services/firestore/client.py
@@ -1166,7 +1166,7 @@ def create_document(

try:
_client_info = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google.cloud.firestore",).version,
gapic_version=pkg_resources.get_distribution("google-firestore",).version,
)
except pkg_resources.DistributionNotFound:
_client_info = gapic_v1.client_info.ClientInfo()
12 changes: 6 additions & 6 deletions google/cloud/firestore_v1/types/document.py
@@ -25,7 +25,7 @@

__protobuf__ = proto.module(
package="google.cloud.firestore.v1",
manifest={"Document", "Value", "ArrayValue", "MapValue"},
manifest={"Document", "Value", "ArrayValue", "MapValue",},
)


@@ -82,11 +82,11 @@ class Document(proto.Message):

name = proto.Field(proto.STRING, number=1)

fields = proto.MapField(proto.STRING, proto.MESSAGE, number=2, message="Value")
fields = proto.MapField(proto.STRING, proto.MESSAGE, number=2, message="Value",)

create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp)
create_time = proto.Field(proto.MESSAGE, number=3, message=timestamp.Timestamp,)

update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp)
update_time = proto.Field(proto.MESSAGE, number=4, message=timestamp.Timestamp,)


class Value(proto.Message):
@@ -172,7 +172,7 @@ class ArrayValue(proto.Message):
Values in the array.
"""

values = proto.RepeatedField(proto.MESSAGE, number=1, message=Value)
values = proto.RepeatedField(proto.MESSAGE, number=1, message=Value,)


class MapValue(proto.Message):
@@ -189,7 +189,7 @@ class MapValue(proto.Message):
bytes and cannot be empty.
"""

fields = proto.MapField(proto.STRING, proto.MESSAGE, number=1, message=Value)
fields = proto.MapField(proto.STRING, proto.MESSAGE, number=1, message=Value,)


__all__ = tuple(sorted(__protobuf__.manifest))
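
The document.py changes above are trailing-comma churn from the regenerated protos; the field definitions themselves are unchanged. As a reminder of what those Field/MapField declarations mean in use, here is a small proto-plus construction sketch; the resource name is a placeholder, and the types import path assumes the layout shown in this diff.

from google.cloud.firestore_v1.types import document

# "fields" is the MapField(STRING -> Value) declared above; integer_value is
# one member of the Value message's value_type oneof.
doc = document.Document(
    name="projects/my-project/databases/(default)/documents/users/alovelace",
    fields={"born": document.Value(integer_value=1815)},
)
print(doc.fields["born"].integer_value)  # -> 1815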
4 changes: 2 additions & 2 deletions google/cloud/firestore_v1/types/firestore.py
@@ -355,9 +355,9 @@ class BatchGetDocumentsResponse(proto.Message):
between their read_time and this one.
"""

found = proto.Field(proto.MESSAGE, number=1, message=gf_document.Document, oneof="result")
found = proto.Field(proto.MESSAGE, number=1, message=gf_document.Document,)

missing = proto.Field(proto.STRING, number=2, oneof="result")
missing = proto.Field(proto.STRING, number=2)

transaction = proto.Field(proto.BYTES, number=3)

4 changes: 3 additions & 1 deletion google/cloud/firestore_v1beta1/types/firestore.py
@@ -351,7 +351,9 @@ class BatchGetDocumentsResponse(proto.Message):
between their read_time and this one.
"""

found = proto.Field(proto.MESSAGE, number=1, message=gf_document.Document, oneof="result")
found = proto.Field(
proto.MESSAGE, number=1, message=gf_document.Document, oneof="result"
)

missing = proto.Field(proto.STRING, number=2, oneof="result")

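Note that the v1 hunk above drops oneof="result" from found/missing, while this v1beta1 hunk keeps it. With the oneof declared, at most one of the two fields is populated and the underlying protobuf can report which; a short sketch follows (placeholder resource name, import path assumed from the file layout in this diff).

from google.cloud.firestore_v1beta1.types import firestore

response = firestore.BatchGetDocumentsResponse(
    missing="projects/my-project/databases/(default)/documents/users/unknown"
)
# pb() unwraps the proto-plus wrapper so WhichOneof can inspect the oneof.
assert firestore.BatchGetDocumentsResponse.pb(response).WhichOneof("result") == "missing"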
2 changes: 1 addition & 1 deletion synth.py
@@ -57,7 +57,7 @@

s.replace(
f"google/cloud/firestore_{version}/types/query.py",
f"from = proto",
f"from_ = proto",
f"from_ = proto",
)

10 changes: 7 additions & 3 deletions tests/system/test_system.py
@@ -83,8 +83,10 @@ def test_create_document(client, cleanup):
# Allow a bit of clock skew, but make sure timestamps are close.
assert -300.0 < delta.total_seconds() < 300.0

with pytest.raises(AlreadyExists):
document.create(data)
# TODO(microgen): after gen, this no longer raises already exists, simply
# updates.
# with pytest.raises(AlreadyExists):
document.create(data)

# Verify the server times.
snapshot = document.get()
@@ -95,7 +97,9 @@ def test_create_document(client, cleanup):
# NOTE: We could check the ``transform_results`` from the write result
# for the document transform, but this value gets dropped. Instead
# we make sure the timestamps are close.
assert 0.0 <= delta.total_seconds() < 5.0
# TODO(microgen): this was 0.0 - 5.0 before. After microgen, this started
# getting very small negative times.
assert -0.2 <= delta.total_seconds() < 5.0
expected_data = {
"now": server_now,
"eenta-ger": data["eenta-ger"],
4 changes: 2 additions & 2 deletions tests/unit/v1/test_batch.py
@@ -181,7 +181,7 @@ def test_commit(self):
write_results = batch.commit()
self.assertEqual(write_results, list(commit_response.write_results))
self.assertEqual(batch.write_results, write_results)
# TODO(crwilcox): v2: commit time is already a datetime, though not with nano
# TODO(microgen): v2: commit time is already a datetime, though not with nano
# self.assertEqual(batch.commit_time, timestamp)
# Make sure batch has no more "changes".
self.assertEqual(batch._write_pbs, [])
@@ -221,7 +221,7 @@ def test_as_context_mgr_wo_error(self):
write_pbs = batch._write_pbs[::]

self.assertEqual(batch.write_results, list(commit_response.write_results))
# TODO(crwilcox): v2: commit time is already a datetime, though not with nano
# TODO(microgen): v2: commit time is already a datetime, though not with nano
# self.assertEqual(batch.commit_time, timestamp)
# Make sure batch has no more "changes".
self.assertEqual(batch._write_pbs, [])
2 changes: 1 addition & 1 deletion tests/unit/v1/test_client.py
@@ -674,7 +674,7 @@ def test_found(self):
self.assertIs(snapshot._reference, mock.sentinel.reference)
self.assertEqual(snapshot._data, {"foo": 1.5, "bar": u"skillz"})
self.assertTrue(snapshot._exists)
# TODO(crwilcox): v2: datetime with nanos implementation needed.
# TODO(microgen): v2: datetime with nanos implementation needed.
# self.assertEqual(snapshot.read_time, read_time)
# self.assertEqual(snapshot.create_time, create_time)
# self.assertEqual(snapshot.update_time, update_time)
6 changes: 3 additions & 3 deletions tests/unit/v1beta1/_test_cross_language.py
@@ -209,9 +209,9 @@ def test_delete_testprotos(test_proto):
@pytest.mark.parametrize("test_proto", _LISTEN_TESTPROTOS)
def test_listen_testprotos(test_proto): # pragma: NO COVER
# test_proto.listen has 'reponses' messages,
# 'google.firestore.v1beta1.ListenResponse'
# 'google.cloud.firestore.v1beta1.ListenResponse'
# and then an expected list of 'snapshots' (local 'Snapshot'), containing
# 'docs' (list of 'google.firestore.v1beta1.Document'),
# 'docs' (list of 'google.cloud.firestore.v1beta1.Document'),
# 'changes' (list of local 'DocChange'), and 'read_time' timestamp.
from google.cloud.firestore_v1beta1 import Client
from google.cloud.firestore_v1beta1 import DocumentReference
@@ -401,7 +401,7 @@ def parse_query(testcase):
# 'query' testcase contains:
# - 'coll_path': collection ref path.
# - 'clauses': array of one or more 'Clause' elements
# - 'query': the actual google.firestore.v1beta1.StructuredQuery message
# - 'query': the actual google.cloud.firestore.v1beta1.StructuredQuery message
# to be constructed.
# - 'is_error' (as other testcases).
#
4 changes: 2 additions & 2 deletions tests/unit/v1beta1/test_batch.py
@@ -182,7 +182,7 @@ def test_commit(self):
write_results = batch.commit()
self.assertEqual(write_results, list(commit_response.write_results))
self.assertEqual(batch.write_results, write_results)
# TODO(crwilcox): v2: commit time is already a datetime, though not with nano
# TODO(microgen): v2: commit time is already a datetime, though not with nano
# self.assertEqual(batch.commit_time, timestamp)
# Make sure batch has no more "changes".
self.assertEqual(batch._write_pbs, [])
@@ -222,7 +222,7 @@ def test_as_context_mgr_wo_error(self):
write_pbs = batch._write_pbs[::]

self.assertEqual(batch.write_results, list(commit_response.write_results))
# TODO(crwilcox): v2: commit time is already a datetime, though not with nano
# TODO(microgen): v2: commit time is already a datetime, though not with nano
# self.assertEqual(batch.commit_time, timestamp)
# Make sure batch has no more "changes".
self.assertEqual(batch._write_pbs, [])
2 changes: 1 addition & 1 deletion tests/unit/v1beta1/test_client.py
@@ -597,7 +597,7 @@ def test_found(self):
self.assertIs(snapshot._reference, mock.sentinel.reference)
self.assertEqual(snapshot._data, {"foo": 1.5, "bar": u"skillz"})
self.assertTrue(snapshot._exists)
# TODO(crwilcox): v2: datetimewithnanos
# TODO(microgen): v2: datetimewithnanos
# self.assertEqual(snapshot.read_time, read_time)
# self.assertEqual(snapshot.create_time, create_time)
# self.assertEqual(snapshot.update_time, update_time)
