Skip to content

Commit

Permalink
fix: migrate system-test code paths to the microgenerator client surface (replace `gapic` imports with `services` clients, pass `request={...}` dicts, and unwrap `._pb` where raw protobuf messages are required)
Browse files Browse the repository at this point in the history
  • Loading branch information
crwilcox committed Oct 15, 2020
1 parent be9ea01 commit da8300d
Show file tree
Hide file tree
Showing 10 changed files with 56 additions and 51 deletions.
2 changes: 1 addition & 1 deletion docs/admin_client.rst
@@ -1,6 +1,6 @@
Datastore Admin Client
======================

.. automodule:: google.cloud.datastore_admin_v1.gapic.datastore_admin_client
.. automodule:: google.cloud.datastore_admin_v1.services.datastore_admin.client
:members:
:show-inheritance:
9 changes: 7 additions & 2 deletions google/cloud/datastore/_gapic.py
Expand Up @@ -19,7 +19,8 @@

from google.cloud._helpers import make_secure_channel
from google.cloud._http import DEFAULT_USER_AGENT
from google.cloud.datastore_v1.gapic import datastore_client
from google.cloud.datastore_v1.services.datastore import client as datastore_client
from google.cloud.datastore_v1.services.datastore.transports import grpc


def make_datastore_api(client):
Expand All @@ -38,6 +39,10 @@ def make_datastore_api(client):
else:
channel = insecure_channel(host)

# from .base import DatastoreTransport

transport = grpc.DatastoreGrpcTransport(channel=channel)

return datastore_client.DatastoreClient(
channel=channel, client_info=client._client_info
transport=transport, client_info=client._client_info
)
2 changes: 1 addition & 1 deletion google/cloud/datastore/_http.py
Expand Up @@ -110,7 +110,7 @@ def _rpc(http, project, method, base_url, client_info, request_pb, response_pb_c
"""
req_data = request_pb.SerializeToString()
response = _request(http, project, method, req_data, base_url, client_info)
return response_pb_cls._meta._pb.FromString(response)
return response_pb_cls.deserialize(response)


def build_api_url(project, method, base_url):
Expand Down
14 changes: 9 additions & 5 deletions google/cloud/datastore/batch.py
Expand Up @@ -255,8 +255,14 @@ def _commit(self, retry, timeout):
kwargs["timeout"] = timeout

commit_response_pb = self._client._datastore_api.commit(
self.project, mode, self._mutations, transaction=self._id, **kwargs
)
request={
"project_id": self.project,
"mode": mode,
"transaction": self._id,
"mutations": self._mutations,
**kwargs,
},
)._pb
_, updated_keys = _parse_commit_response(commit_response_pb)
# If the back-end returns without error, we are guaranteed that
# ``commit`` will return keys that match (length and
Expand Down Expand Up @@ -355,8 +361,6 @@ def _parse_commit_response(commit_response_pb):
mut_results = commit_response_pb.mutation_results
index_updates = commit_response_pb.index_updates
completed_keys = [
mut_result.key
for mut_result in mut_results
if mut_result.HasField("key")
mut_result.key for mut_result in mut_results if mut_result.HasField("key")
] # Message field (Key)
return index_updates, completed_keys
24 changes: 18 additions & 6 deletions google/cloud/datastore/client.py
Expand Up @@ -185,7 +185,12 @@ def _extended_lookup(
while loop_num < _MAX_LOOPS: # loop against possible deferred.
loop_num += 1
lookup_response = datastore_api.lookup(
project, key_pbs, read_options=read_options, **kwargs
request={
"project_id": project,
"keys": key_pbs,
"read_options": read_options,
**kwargs,
},
)

# Accumulate the new results.
Expand Down Expand Up @@ -535,7 +540,7 @@ def get_multi(
helpers.key_from_protobuf(deferred_pb) for deferred_pb in deferred
]

return [helpers.entity_from_protobuf(entity_pb) for entity_pb in entity_pbs]
return [helpers.entity_from_protobuf(entity_pb._pb) for entity_pb in entity_pbs]

def put(self, entity, retry=None, timeout=None):
"""Save an entity in the Cloud Datastore.
Expand Down Expand Up @@ -702,7 +707,11 @@ def allocate_ids(self, incomplete_key, num_ids, retry=None, timeout=None):
kwargs = _make_retry_timeout_kwargs(retry, timeout)

response_pb = self._datastore_api.allocate_ids(
incomplete_key.project, incomplete_key_pbs, **kwargs
request={
"project_id": incomplete_key.project,
"keys": incomplete_key_pbs,
**kwargs,
},
)
allocated_ids = [
allocated_key_pb.path[-1].id for allocated_key_pb in response_pb.keys
Expand Down Expand Up @@ -871,8 +880,9 @@ def reserve_ids_sequential(self, complete_key, num_ids, retry=None, timeout=None
key_pbs.append(key.to_protobuf())

kwargs = _make_retry_timeout_kwargs(retry, timeout)
self._datastore_api.reserve_ids(complete_key.project, key_pbs, **kwargs)

self._datastore_api.reserve_ids(
request={"project_id": complete_key.project, "keys": key_pbs, **kwargs}
)
return None

def reserve_ids(self, complete_key, num_ids, retry=None, timeout=None):
Expand Down Expand Up @@ -921,6 +931,8 @@ def reserve_ids_multi(self, complete_keys, retry=None, timeout=None):

kwargs = _make_retry_timeout_kwargs(retry, timeout)
key_pbs = [key.to_protobuf() for key in complete_keys]
self._datastore_api.reserve_ids(complete_keys[0].project, key_pbs, **kwargs)
self._datastore_api.reserve_ids(
request={"project_id": complete_keys[0].project, "keys": key_pbs, **kwargs}
)

return None
5 changes: 2 additions & 3 deletions google/cloud/datastore/helpers.py
Expand Up @@ -393,7 +393,7 @@ def _get_value_from_value_pb(value_pb):
:raises: :class:`ValueError <exceptions.ValueError>` if no value type
has been set.
"""
if getattr(value_pb, '_pb', False):
if getattr(value_pb, "_pb", False):
# TODO(microgenerator): fix inconsistent calling.
value_pb = value_pb._pb

Expand Down Expand Up @@ -430,8 +430,7 @@ def _get_value_from_value_pb(value_pb):

elif value_type == "geo_point_value":
result = GeoPoint(
value_pb.geo_point_value.latitude,
value_pb.geo_point_value.longitude,
value_pb.geo_point_value.latitude, value_pb.geo_point_value.longitude,
)

elif value_type == "null_value":
Expand Down
45 changes: 15 additions & 30 deletions google/cloud/datastore/query.py
Expand Up @@ -575,22 +575,14 @@ def _next_page(self):
if self._timeout is not None:
kwargs["timeout"] = self._timeout

# response_pb = self.client._datastore_api.run_query(
# request={
# "project_id": self._query.project,
# "partition_id": partition_id,
# "read_options": read_options,
# "query": query_pb,
# # "gql_query": query_pb,
# },
# **kwargs
# )
response_pb = self.client._datastore_api.run_query(
self._query.project,
partition_id,
read_options,
query=query_pb,
**kwargs
request={
"project_id": self._query.project,
"partition_id": partition_id,
"read_options": read_options,
"query": query_pb,
**kwargs,
}
)

while (
Expand All @@ -603,21 +595,14 @@ def _next_page(self):
# more than 1000 skipped results in a query.
query_pb.start_cursor = response_pb.batch.skipped_cursor
query_pb.offset -= response_pb.batch.skipped_results
# response_pb = self.client._datastore_api.run_query(
# request={
# "project_id": self._query.project,
# "partition_id": partition_id,
# "read_options": read_options,
# "query": kwargs,
# "gql_query": query_pb,
# }
# )
response_pb = self.client._datastore_api.run_query(
self._query.project,
partition_id,
read_options,
query=query_pb,
**kwargs,
request={
"project_id": self._query.project,
"partition_id": partition_id,
"read_options": read_options,
"query": query_pb,
**kwargs,
}
)
entity_pbs = self._process_query_results(response_pb)
return page_iterator.Page(self, entity_pbs, self.item_to_value)
Expand Down Expand Up @@ -702,7 +687,7 @@ def _item_to_entity(iterator, entity_pb):
:rtype: :class:`~google.cloud.datastore.entity.Entity`
:returns: The next entity in the page.
"""
return helpers.entity_from_protobuf(entity_pb)
return helpers.entity_from_protobuf(entity_pb._pb)


# pylint: enable=unused-argument
2 changes: 1 addition & 1 deletion google/cloud/datastore/transaction.py
Expand Up @@ -233,7 +233,7 @@ def begin(self, retry=None, timeout=None):

try:
response_pb = self._client._datastore_api.begin_transaction(
self.project, **kwargs
project_id=self.project, **kwargs
)
self._id = response_pb.transaction
except: # noqa: E722 do not use bare except, specify exception instead
Expand Down
2 changes: 1 addition & 1 deletion tests/unit/test_batch.py
Expand Up @@ -525,7 +525,7 @@ def _make_commit_response(*new_key_ids):
from google.cloud.datastore_v1.types import datastore as datastore_pb2

mutation_results = [_make_mutation(key_id) for key_id in new_key_ids]
return datastore_pb2.CommitResponse(mutation_results=mutation_results)
return datastore_pb2.CommitResponse(mutation_results=mutation_results)._pb


def _make_datastore_api(*new_key_ids):
Expand Down
2 changes: 1 addition & 1 deletion tests/unit/test_query.py
Expand Up @@ -579,8 +579,8 @@ def _next_page_helper(self, txn_id=None, retry=None, timeout=None):
"partition_id": partition_id,
"read_options": read_options,
"query": empty_query,
**kwargs,
},
**kwargs
)

def test__next_page(self):
Expand Down

0 comments on commit da8300d

Please sign in to comment.