Skip to content

Commit

Permalink
feat!: Leverage new generator, proto-plus, for google-cloud-datastore (#104)
Browse files Browse the repository at this point in the history

This uses the new microgenerator as the underlying transport for the Cloud Datastore client.

Files in services/, as well as tests/gapic, are generated.

Major Changes: Discontinues python 2.7 support.

release-as: 2.0.0-dev1
  • Loading branch information
crwilcox committed Oct 30, 2020
1 parent b6bc2f7 commit 1723a26
Show file tree
Hide file tree
Showing 90 changed files with 11,473 additions and 11,732 deletions.
22 changes: 10 additions & 12 deletions .coveragerc
Expand Up @@ -14,25 +14,23 @@
# See the License for the specific language governing permissions and
# limitations under the License.

# Generated by synthtool. DO NOT EDIT!
[run]
branch = True
omit =
google/cloud/__init__.py

[report]
fail_under = 100
show_missing = True
omit =
google/cloud/__init__.py
google/cloud/datastore_v1/__init__.py
google/cloud/datastore_admin_v1/__init__.py
*/site-packages/*.py
exclude_lines =
# Re-enable the standard pragma
pragma: NO COVER
# Ignore debug-only repr
def __repr__
# Ignore abstract methods
raise NotImplementedError
omit =
*/gapic/*.py
*/proto/*.py
*/core/*.py
*/site-packages/*.py
google/cloud/__init__.py
# Ignore pkg_resources exceptions.
# This is added at the module level as a safeguard for if someone
# generates the code and tries to run it without pip installing. This
# makes it virtually impossible to test properly.
except pkg_resources.DistributionNotFound
6 changes: 6 additions & 0 deletions .kokoro/samples/python3.6/common.cfg
Expand Up @@ -13,6 +13,12 @@ env_vars: {
value: "py-3.6"
}

# Declare build specific Cloud project.
env_vars: {
key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
value: "python-docs-samples-tests-py36"
}

env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-datastore/.kokoro/test-samples.sh"
Expand Down
6 changes: 6 additions & 0 deletions .kokoro/samples/python3.7/common.cfg
Expand Up @@ -13,6 +13,12 @@ env_vars: {
value: "py-3.7"
}

# Declare build specific Cloud project.
env_vars: {
key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
value: "python-docs-samples-tests-py37"
}

env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-datastore/.kokoro/test-samples.sh"
Expand Down
6 changes: 6 additions & 0 deletions .kokoro/samples/python3.8/common.cfg
Expand Up @@ -13,6 +13,12 @@ env_vars: {
value: "py-3.8"
}

# Declare build specific Cloud project.
env_vars: {
key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
value: "python-docs-samples-tests-py38"
}

env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-datastore/.kokoro/test-samples.sh"
Expand Down
6 changes: 1 addition & 5 deletions README.rst
Expand Up @@ -53,11 +53,7 @@ dependencies.

Supported Python Versions
^^^^^^^^^^^^^^^^^^^^^^^^^
Python >= 3.5

Deprecated Python Versions
^^^^^^^^^^^^^^^^^^^^^^^^^^
Python == 2.7. Python 2.7 support will be removed on January 1, 2020.
Python >= 3.6


Mac/Linux
Expand Down
2 changes: 1 addition & 1 deletion docs/admin_client.rst
@@ -1,6 +1,6 @@
Datastore Admin Client
======================

.. automodule:: google.cloud.datastore_admin_v1.gapic.datastore_admin_client
.. automodule:: google.cloud.datastore_admin_v1.services.datastore_admin.client
:members:
:show-inheritance:
6 changes: 4 additions & 2 deletions google/cloud/datastore/_gapic.py
Expand Up @@ -19,7 +19,8 @@

from google.cloud._helpers import make_secure_channel
from google.cloud._http import DEFAULT_USER_AGENT
from google.cloud.datastore_v1.gapic import datastore_client
from google.cloud.datastore_v1.services.datastore import client as datastore_client
from google.cloud.datastore_v1.services.datastore.transports import grpc


def make_datastore_api(client):
Expand All @@ -38,6 +39,7 @@ def make_datastore_api(client):
else:
channel = insecure_channel(host)

transport = grpc.DatastoreGrpcTransport(channel=channel)
return datastore_client.DatastoreClient(
channel=channel, client_info=client._client_info
transport=transport, client_info=client._client_info
)
6 changes: 3 additions & 3 deletions google/cloud/datastore/_http.py
Expand Up @@ -18,7 +18,7 @@

from google.cloud import _http as connection_module
from google.cloud import exceptions
from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2
from google.cloud.datastore_v1.types import datastore as _datastore_pb2


DATASTORE_API_HOST = "datastore.googleapis.com"
Expand Down Expand Up @@ -108,9 +108,9 @@ def _rpc(http, project, method, base_url, client_info, request_pb, response_pb_c
:rtype: :class:`google.protobuf.message.Message`
:returns: The RPC message parsed from the response.
"""
req_data = request_pb.SerializeToString()
req_data = request_pb._pb.SerializeToString()
response = _request(http, project, method, req_data, base_url, client_info)
return response_pb_cls.FromString(response)
return response_pb_cls.deserialize(response)


def build_api_url(project, method, base_url):
Expand Down
24 changes: 16 additions & 8 deletions google/cloud/datastore/batch.py
Expand Up @@ -22,7 +22,7 @@
"""

from google.cloud.datastore import helpers
from google.cloud.datastore_v1.proto import datastore_pb2 as _datastore_pb2
from google.cloud.datastore_v1.types import datastore as _datastore_pb2


class Batch(object):
Expand Down Expand Up @@ -219,7 +219,7 @@ def delete(self, key):
raise ValueError("Key must be from same project as batch")

key_pb = key.to_protobuf()
self._add_delete_key_pb().CopyFrom(key_pb)
self._add_delete_key_pb()._pb.CopyFrom(key_pb._pb)

def begin(self):
"""Begins a batch.
Expand All @@ -242,9 +242,9 @@ def _commit(self, retry, timeout):
This is called by :meth:`commit`.
"""
if self._id is None:
mode = _datastore_pb2.CommitRequest.NON_TRANSACTIONAL
mode = _datastore_pb2.CommitRequest.Mode.NON_TRANSACTIONAL
else:
mode = _datastore_pb2.CommitRequest.TRANSACTIONAL
mode = _datastore_pb2.CommitRequest.Mode.TRANSACTIONAL

kwargs = {}

Expand All @@ -255,8 +255,15 @@ def _commit(self, retry, timeout):
kwargs["timeout"] = timeout

commit_response_pb = self._client._datastore_api.commit(
self.project, mode, self._mutations, transaction=self._id, **kwargs
request={
"project_id": self.project,
"mode": mode,
"transaction": self._id,
"mutations": self._mutations,
},
**kwargs,
)

_, updated_keys = _parse_commit_response(commit_response_pb)
# If the back-end returns without error, we are guaranteed that
# ``commit`` will return keys that match (length and
Expand Down Expand Up @@ -337,11 +344,11 @@ def _assign_entity_to_pb(entity_pb, entity):
:param entity: The entity being updated within the batch / transaction.
"""
bare_entity_pb = helpers.entity_to_protobuf(entity)
bare_entity_pb.key.CopyFrom(bare_entity_pb.key)
entity_pb.CopyFrom(bare_entity_pb)
bare_entity_pb._pb.key.CopyFrom(bare_entity_pb._pb.key)
entity_pb._pb.CopyFrom(bare_entity_pb._pb)


def _parse_commit_response(commit_response_pb):
def _parse_commit_response(commit_response):
"""Extract response data from a commit response.
:type commit_response_pb: :class:`.datastore_pb2.CommitResponse`
Expand All @@ -352,6 +359,7 @@ def _parse_commit_response(commit_response_pb):
:class:`.entity_pb2.Key` for each incomplete key
that was completed in the commit.
"""
commit_response_pb = commit_response._pb
mut_results = commit_response_pb.mutation_results
index_updates = commit_response_pb.index_updates
completed_keys = [
Expand Down
21 changes: 15 additions & 6 deletions google/cloud/datastore/client.py
Expand Up @@ -185,7 +185,12 @@ def _extended_lookup(
while loop_num < _MAX_LOOPS: # loop against possible deferred.
loop_num += 1
lookup_response = datastore_api.lookup(
project, key_pbs, read_options=read_options, **kwargs
request={
"project_id": project,
"keys": key_pbs,
"read_options": read_options,
},
**kwargs,
)

# Accumulate the new results.
Expand Down Expand Up @@ -535,7 +540,7 @@ def get_multi(
helpers.key_from_protobuf(deferred_pb) for deferred_pb in deferred
]

return [helpers.entity_from_protobuf(entity_pb) for entity_pb in entity_pbs]
return [helpers.entity_from_protobuf(entity_pb._pb) for entity_pb in entity_pbs]

def put(self, entity, retry=None, timeout=None):
"""Save an entity in the Cloud Datastore.
Expand Down Expand Up @@ -702,7 +707,8 @@ def allocate_ids(self, incomplete_key, num_ids, retry=None, timeout=None):
kwargs = _make_retry_timeout_kwargs(retry, timeout)

response_pb = self._datastore_api.allocate_ids(
incomplete_key.project, incomplete_key_pbs, **kwargs
request={"project_id": incomplete_key.project, "keys": incomplete_key_pbs},
**kwargs,
)
allocated_ids = [
allocated_key_pb.path[-1].id for allocated_key_pb in response_pb.keys
Expand Down Expand Up @@ -871,8 +877,9 @@ def reserve_ids_sequential(self, complete_key, num_ids, retry=None, timeout=None
key_pbs.append(key.to_protobuf())

kwargs = _make_retry_timeout_kwargs(retry, timeout)
self._datastore_api.reserve_ids(complete_key.project, key_pbs, **kwargs)

self._datastore_api.reserve_ids(
request={"project_id": complete_key.project, "keys": key_pbs}, **kwargs
)
return None

def reserve_ids(self, complete_key, num_ids, retry=None, timeout=None):
Expand Down Expand Up @@ -921,6 +928,8 @@ def reserve_ids_multi(self, complete_keys, retry=None, timeout=None):

kwargs = _make_retry_timeout_kwargs(retry, timeout)
key_pbs = [key.to_protobuf() for key in complete_keys]
self._datastore_api.reserve_ids(complete_keys[0].project, key_pbs, **kwargs)
self._datastore_api.reserve_ids(
request={"project_id": complete_keys[0].project, "keys": key_pbs}, **kwargs
)

return None

0 comments on commit 1723a26

Please sign in to comment.