Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat: add inline type hints #134

Merged
merged 27 commits into from Aug 6, 2020
Merged
Show file tree
Hide file tree
Changes from 11 commits
Commits
Show all changes
27 commits
Select commit Hold shift + click to select a range
e31f7ba
feat: add type hints via pytype
crwilcox Jul 28, 2020
4fc72b7
process: run merge-pyi on some, manual corrections
crwilcox Jul 28, 2020
6a05227
process: run merge-pyi on some, manual corrections
crwilcox Jul 29, 2020
067a0d0
process: run merge-pyi on some, manual corrections
crwilcox Jul 29, 2020
57b88b3
fix: blacken
crwilcox Jul 29, 2020
697217f
docs: link to bug for pytype coroutine/union
crwilcox Jul 29, 2020
903b856
Merge branch 'v2-staging' into pytype
crwilcox Jul 31, 2020
44bd59c
fix: modify union coroutine types
crwilcox Jul 31, 2020
bcfd00c
fix: changes after merge
crwilcox Jul 31, 2020
50e6af7
fix: blacken
crwilcox Jul 31, 2020
b902bac
fix: narrow types, remove unused imports
crwilcox Aug 4, 2020
fb2cf23
fix: remove duplication
crwilcox Aug 4, 2020
f0ffd5a
fix: remove unnecessary union
crwilcox Aug 4, 2020
c07957a
fix: feedback
crwilcox Aug 4, 2020
bf2dbd8
fix: some import modifications
crwilcox Aug 4, 2020
fd7305d
fix: some import modifications
crwilcox Aug 4, 2020
85f1cc6
use 3.7 as 3.8 is unstable for pytype
crwilcox Aug 4, 2020
c31bf2d
ignore other module types
crwilcox Aug 5, 2020
f1d3904
ignore other module types
crwilcox Aug 5, 2020
b509c72
ignore other module types
crwilcox Aug 5, 2020
67d828d
ignore other module types
crwilcox Aug 5, 2020
0d6cf2b
ignore other module types
crwilcox Aug 5, 2020
8f92945
ignore other module types
crwilcox Aug 5, 2020
e3c295d
ignore other module types
crwilcox Aug 5, 2020
6c3052e
ignore other module types
crwilcox Aug 5, 2020
cdec8ec
ignore other module types
crwilcox Aug 6, 2020
edf3c64
remove duplicate declarations
crwilcox Aug 6, 2020
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
4 changes: 4 additions & 0 deletions google/cloud/firestore.py
Expand Up @@ -46,6 +46,10 @@
from google.cloud.firestore_v1 import Watch
from google.cloud.firestore_v1 import WriteBatch
from google.cloud.firestore_v1 import WriteOption
from typing import List

__all__: List[str]
__version__: str


__all__ = [
Expand Down
Expand Up @@ -18,7 +18,7 @@
import abc
import typing

from google import auth
from google import auth # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import operations_v1 # type: ignore
from google.auth import credentials # type: ignore
Expand Down
7 changes: 7 additions & 0 deletions google/cloud/firestore_v1/__init__.py
Expand Up @@ -97,6 +97,13 @@
# from .types.write import DocumentDelete
# from .types.write import DocumentRemove
from .types.write import DocumentTransform
from typing import List

__all__: List[str]
__version__: str

__all__: List[str]
__version__: str
crwilcox marked this conversation as resolved.
Show resolved Hide resolved

# from .types.write import ExistenceFilter
# from .types.write import Write
Expand Down
97 changes: 57 additions & 40 deletions google/cloud/firestore_v1/_helpers.py
Expand Up @@ -20,7 +20,7 @@
from google.type import latlng_pb2
import grpc

from google.cloud import exceptions
from google.cloud import exceptions # type: ignore
from google.cloud._helpers import _datetime_to_pb_timestamp
from google.api_core.datetime_helpers import DatetimeWithNanoseconds
from google.cloud.firestore_v1.types.write import DocumentTransform
Expand All @@ -31,6 +31,11 @@
from google.cloud.firestore_v1.types import common
from google.cloud.firestore_v1.types import document
from google.cloud.firestore_v1.types import write
from typing import Any, Generator, List, NoReturn, Optional, Tuple

_EmptyDict: transforms.Sentinel
_GRPC_ERROR_MAPPING: dict
_datetime_to_pb_timestamp: Any


BAD_PATH_TEMPLATE = "A path element must be a string. Received {}, which is a {}."
Expand Down Expand Up @@ -60,11 +65,11 @@ class GeoPoint(object):
longitude (float): Longitude of a point.
"""

def __init__(self, latitude, longitude):
def __init__(self, latitude, longitude) -> None:
    """Store the coordinate pair describing this point.

    Args:
        latitude (float): Latitude of the point.
        longitude (float): Longitude of the point.
    """
    # The two assignments are independent; order does not matter.
    self.longitude = longitude
    self.latitude = latitude

def to_protobuf(self):
def to_protobuf(self) -> Any:
"""Convert the current object to protobuf.

Returns:
Expand Down Expand Up @@ -100,7 +105,7 @@ def __ne__(self, other):
return not equality_val


def verify_path(path, is_collection):
def verify_path(path, is_collection) -> None:
"""Verifies that a ``path`` has the correct form.

Checks that all of the elements in ``path`` are strings.
Expand Down Expand Up @@ -136,7 +141,7 @@ def verify_path(path, is_collection):
raise ValueError(msg)


def encode_value(value):
def encode_value(value) -> types.document.Value:
"""Converts a native Python value into a Firestore protobuf ``Value``.

Args:
Expand Down Expand Up @@ -200,7 +205,7 @@ def encode_value(value):
)


def encode_dict(values_dict):
def encode_dict(values_dict) -> dict:
"""Encode a dictionary into protobuf ``Value``-s.

Args:
Expand All @@ -214,7 +219,7 @@ def encode_dict(values_dict):
return {key: encode_value(value) for key, value in values_dict.items()}


def reference_value_to_document(reference_value, client):
def reference_value_to_document(reference_value, client) -> Any:
"""Convert a reference value string to a document.

Args:
Expand Down Expand Up @@ -248,7 +253,7 @@ def reference_value_to_document(reference_value, client):
return document


def decode_value(value, client):
def decode_value(value, client) -> Any:
"""Converts a Firestore protobuf ``Value`` to a native Python value.

Args:
Expand Down Expand Up @@ -294,7 +299,7 @@ def decode_value(value, client):
raise ValueError("Unknown ``value_type``", value_type)


def decode_dict(value_fields, client):
def decode_dict(value_fields, client) -> dict:
"""Converts a protobuf map of Firestore ``Value``-s.

Args:
Expand All @@ -311,7 +316,7 @@ def decode_dict(value_fields, client):
return {key: decode_value(value, client) for key, value in value_fields.items()}


def get_doc_id(document_pb, expected_prefix):
def get_doc_id(document_pb, expected_prefix) -> Any:
"""Parse a document ID from a document protobuf.

Args:
Expand Down Expand Up @@ -342,7 +347,9 @@ def get_doc_id(document_pb, expected_prefix):
_EmptyDict = transforms.Sentinel("Marker for an empty dict value")


def extract_fields(document_data, prefix_path, expand_dots=False):
def extract_fields(
document_data, prefix_path: FieldPath, expand_dots=False
) -> Generator[Tuple[Any, Any], Any, None]:
"""Do depth-first walk of tree, yielding field_path, value"""
if not document_data:
yield prefix_path, _EmptyDict
Expand All @@ -363,7 +370,7 @@ def extract_fields(document_data, prefix_path, expand_dots=False):
yield field_path, value


def set_field_value(document_data, field_path, value):
def set_field_value(document_data, field_path, value) -> None:
"""Set a value into a document for a field_path"""
current = document_data
for element in field_path.parts[:-1]:
Expand All @@ -373,7 +380,7 @@ def set_field_value(document_data, field_path, value):
current[field_path.parts[-1]] = value


def get_field_value(document_data, field_path):
def get_field_value(document_data, field_path) -> Any:
if not field_path.parts:
raise ValueError("Empty path")

Expand All @@ -394,7 +401,7 @@ class DocumentExtractor(object):
a document.
"""

def __init__(self, document_data):
def __init__(self, document_data) -> None:
self.document_data = document_data
self.field_paths = []
self.deleted_fields = []
Expand Down Expand Up @@ -440,7 +447,9 @@ def __init__(self, document_data):
self.field_paths.append(field_path)
set_field_value(self.set_fields, field_path, value)

def _get_document_iterator(self, prefix_path):
def _get_document_iterator(
    self, prefix_path: FieldPath
) -> Generator[Tuple[Any, Any], Any, None]:
    """Yield ``(field_path, value)`` pairs from this extractor's data.

    Base-class behavior: walk the document as-is (dotted keys are NOT
    expanded here; see the update-extractor override).
    """
    pairs = extract_fields(self.document_data, prefix_path)
    return pairs

@property
Expand All @@ -465,10 +474,12 @@ def transform_paths(self):
+ list(self.minimums)
)

def _get_update_mask(self, allow_empty_mask=False):
def _get_update_mask(self, allow_empty_mask=False) -> None:
    """Return the update mask for the write; the base extractor has none.

    Subclasses override this to build a ``DocumentMask`` from their
    field paths; ``None`` here means no mask is attached to the write.
    ``allow_empty_mask`` is unused in the base class.
    """
    return None

def get_update_pb(self, document_path, exists=None, allow_empty_mask=False):
def get_update_pb(
self, document_path, exists=None, allow_empty_mask=False
) -> types.write.Write:

if exists is not None:
current_document = common.Precondition(exists=exists)
Expand All @@ -485,7 +496,7 @@ def get_update_pb(self, document_path, exists=None, allow_empty_mask=False):

return update_pb

def get_transform_pb(self, document_path, exists=None):
def get_transform_pb(self, document_path, exists=None) -> types.write.Write:
def make_array_value(values):
value_list = [encode_value(element) for element in values]
return document.ArrayValue(values=value_list)
Expand Down Expand Up @@ -565,7 +576,7 @@ def make_array_value(values):
return transform_pb


def pbs_for_create(document_path, document_data):
def pbs_for_create(document_path, document_data) -> List[types.write.Write]:
"""Make ``Write`` protobufs for ``create()`` methods.

Args:
Expand Down Expand Up @@ -597,7 +608,7 @@ def pbs_for_create(document_path, document_data):
return write_pbs


def pbs_for_set_no_merge(document_path, document_data):
def pbs_for_set_no_merge(document_path, document_data) -> List[types.write.Write]:
"""Make ``Write`` protobufs for ``set()`` methods.

Args:
Expand Down Expand Up @@ -632,7 +643,7 @@ class DocumentExtractorForMerge(DocumentExtractor):
""" Break document data up into actual data and transforms.
"""

def __init__(self, document_data):
def __init__(self, document_data) -> None:
super(DocumentExtractorForMerge, self).__init__(document_data)
self.data_merge = []
self.transform_merge = []
Expand All @@ -652,20 +663,20 @@ def has_updates(self):

return bool(update_paths)

def _apply_merge_all(self):
def _apply_merge_all(self) -> None:
    """Mark every data field and every transform path as merged."""
    merged_data = sorted(self.field_paths + self.deleted_fields)
    self.data_merge = merged_data
    # TODO: other transforms
    self.transform_merge = self.transform_paths
    self.merge = sorted(merged_data + self.transform_paths)

def _construct_merge_paths(self, merge):
def _construct_merge_paths(self, merge) -> Generator[Any, Any, None]:
    """Yield each entry of ``merge`` as a ``FieldPath``.

    Entries that are already ``FieldPath`` instances pass through
    unchanged; string entries are parsed into path parts first.
    """
    for entry in merge:
        if isinstance(entry, FieldPath):
            yield entry
            continue
        yield FieldPath(*parse_field_path(entry))

def _normalize_merge_paths(self, merge):
def _normalize_merge_paths(self, merge) -> list:
merge_paths = sorted(self._construct_merge_paths(merge))

# Raise if any merge path is a parent of another. Leverage sorting
Expand All @@ -685,7 +696,7 @@ def _normalize_merge_paths(self, merge):

return merge_paths

def _apply_merge_paths(self, merge):
def _apply_merge_paths(self, merge) -> None:

if self.empty_document:
raise ValueError("Cannot merge specific fields with empty document.")
Expand Down Expand Up @@ -749,13 +760,15 @@ def _apply_merge_paths(self, merge):
if path in merged_transform_paths
}

def apply_merge(self, merge):
def apply_merge(self, merge) -> None:
    """Apply a merge option to the extracted document data.

    ``merge is True`` merges every field; any other value is treated
    as an iterable of specific field paths to merge.
    """
    if merge is not True:
        self._apply_merge_paths(merge)
    else:  # merge all fields
        self._apply_merge_all()

def _get_update_mask(self, allow_empty_mask=False):
def _get_update_mask(
self, allow_empty_mask=False
) -> Optional[types.common.DocumentMask]:
# Mask uses dotted / quoted paths.
mask_paths = [
field_path.to_api_repr()
Expand All @@ -767,7 +780,9 @@ def _get_update_mask(self, allow_empty_mask=False):
return common.DocumentMask(field_paths=mask_paths)


def pbs_for_set_with_merge(document_path, document_data, merge):
def pbs_for_set_with_merge(
document_path, document_data, merge
) -> List[types.write.Write]:
"""Make ``Write`` protobufs for ``set()`` methods.

Args:
Expand Down Expand Up @@ -804,7 +819,7 @@ class DocumentExtractorForUpdate(DocumentExtractor):
""" Break document data up into actual data and transforms.
"""

def __init__(self, document_data):
def __init__(self, document_data) -> None:
super(DocumentExtractorForUpdate, self).__init__(document_data)
self.top_level_paths = sorted(
[FieldPath.from_string(key) for key in document_data]
Expand All @@ -825,10 +840,12 @@ def __init__(self, document_data):
"Cannot update with nest delete: {}".format(field_path)
)

def _get_document_iterator(self, prefix_path):
def _get_document_iterator(
    self, prefix_path: FieldPath
) -> Generator[Tuple[Any, Any], Any, None]:
    """Yield ``(field_path, value)`` pairs, expanding dotted keys.

    Unlike the base extractor, ``update()`` data may use dotted key
    strings as nested paths, so ``expand_dots=True`` here.
    """
    pairs = extract_fields(self.document_data, prefix_path, expand_dots=True)
    return pairs

def _get_update_mask(self, allow_empty_mask=False):
def _get_update_mask(self, allow_empty_mask=False) -> types.common.DocumentMask:
mask_paths = []
for field_path in self.top_level_paths:
if field_path not in self.transform_paths:
Expand All @@ -837,7 +854,7 @@ def _get_update_mask(self, allow_empty_mask=False):
return common.DocumentMask(field_paths=mask_paths)


def pbs_for_update(document_path, field_updates, option):
def pbs_for_update(document_path, field_updates, option) -> List[types.write.Write]:
"""Make ``Write`` protobufs for ``update()`` methods.

Args:
Expand Down Expand Up @@ -878,7 +895,7 @@ def pbs_for_update(document_path, field_updates, option):
return write_pbs


def pb_for_delete(document_path, option):
def pb_for_delete(document_path, option) -> types.write.Write:
"""Make a ``Write`` protobuf for ``delete()`` methods.

Args:
Expand All @@ -905,7 +922,7 @@ class ReadAfterWriteError(Exception):
"""


def get_transaction_id(transaction, read_operation=True):
def get_transaction_id(transaction, read_operation=True) -> Any:
"""Get the transaction ID from a ``Transaction`` object.

Args:
Expand Down Expand Up @@ -935,7 +952,7 @@ def get_transaction_id(transaction, read_operation=True):
return transaction.id


def metadata_with_prefix(prefix, **kw):
def metadata_with_prefix(prefix: str, **kw) -> List[Tuple[str, str]]:
"""Create RPC metadata containing a prefix.

Args:
Expand All @@ -950,7 +967,7 @@ def metadata_with_prefix(prefix, **kw):
class WriteOption(object):
"""Option used to assert a condition on a write operation."""

def modify_write(self, write, no_create_msg=None):
def modify_write(self, write, no_create_msg=None) -> NoReturn:
"""Modify a ``Write`` protobuf based on the state of this write option.

This is a virtual method intended to be implemented by subclasses.
Expand Down Expand Up @@ -982,15 +999,15 @@ class LastUpdateOption(WriteOption):
as part of a "write result" protobuf or directly.
"""

def __init__(self, last_update_time):
def __init__(self, last_update_time) -> None:
    """Store the ``update_time`` the write is conditioned on."""
    self._last_update_time = last_update_time

def __eq__(self, other):
    """Equal iff ``other`` is the same class with the same timestamp."""
    if isinstance(other, self.__class__):
        return self._last_update_time == other._last_update_time
    # Different type: defer comparison to the other operand.
    return NotImplemented

def modify_write(self, write, **unused_kwargs):
def modify_write(self, write, **unused_kwargs) -> None:
"""Modify a ``Write`` protobuf based on the state of this write option.

The ``last_update_time`` is added to ``write_pb`` as an "update time"
Expand Down Expand Up @@ -1019,15 +1036,15 @@ class ExistsOption(WriteOption):
should already exist.
"""

def __init__(self, exists):
def __init__(self, exists) -> None:
self._exists = exists

def __eq__(self, other):
    """Equal iff ``other`` is the same class with the same flag."""
    if isinstance(other, self.__class__):
        return self._exists == other._exists
    # Different type: defer comparison to the other operand.
    return NotImplemented

def modify_write(self, write, **unused_kwargs):
def modify_write(self, write, **unused_kwargs) -> None:
"""Modify a ``Write`` protobuf based on the state of this write option.

If:
Expand Down