Skip to content

Commit

Permalink
Start implementing array literals
Browse files Browse the repository at this point in the history
Resolves #631.
  • Loading branch information
evhub committed Dec 5, 2021
1 parent f9a8639 commit ad21951
Show file tree
Hide file tree
Showing 10 changed files with 182 additions and 24 deletions.
18 changes: 17 additions & 1 deletion DOCS.md
Expand Up @@ -398,6 +398,13 @@ _For more information on `reveal_type`, see [`reveal_type` and `reveal_locals`](

Sometimes, MyPy will not know how to handle certain Coconut constructs, such as `addpattern`. For the `addpattern` case, it is recommended to pass `--allow-redefinition` to MyPy (i.e. run `coconut <args> --mypy --allow-redefinition`), though in some cases `--allow-redefinition` may not be sufficient. In that case, either hide the offending code using [`TYPE_CHECKING`](#type_checking) or put a `# type: ignore` comment on the Coconut line which is generating the line MyPy is complaining about (you can figure out what line this is using `--line-numbers`) and the comment will be added to every generated line.

### `numpy` Integration

To allow for better use of [`numpy`](https://numpy.org/) objects in Coconut, all compiled Coconut code will do a number of special things to better integrate with `numpy` (if `numpy` is available to import when the code is run). Specifically:

- [`numpy.ndarray`](https://numpy.org/doc/stable/reference/generated/numpy.ndarray.html) is registered as a [`collections.abc.Sequence`](https://docs.python.org/3/library/collections.abc.html#collections.abc.Sequence), enabling it to be used in [sequence patterns](#semantics-specification).
- When a `numpy` object is passed to [`fmap`](#fmap), [`numpy.vectorize`](https://numpy.org/doc/stable/reference/generated/numpy.vectorize.html) is used instead of the default `fmap` implementation.

## Operators

```{contents}
Expand Down Expand Up @@ -1797,7 +1804,16 @@ _Can't be done without a complicated decorator definition and a long series of c

### Infix Functions

Coconut allows for infix function calling, where an expression that evaluates to a function is surrounded by backticks and then can have arguments placed in front of or behind it. Infix calling has a precedence in-between chaining and `None`-coalescing, and is left-associative. Additionally, infix notation supports a lambda as the last argument, despite lambdas having a lower precedence. Thus, ``a `func` b -> c`` is equivalent to `func(a, b -> c)`.
Coconut allows for infix function calling, where an expression that evaluates to a function is surrounded by backticks and then can have arguments placed in front of or behind it. Infix calling has a precedence in-between chaining and `None`-coalescing, and is left-associative.

The allowable notations for infix calls are:
```coconut
x `f` y => f(x, y)
`f` x => f(x)
x `f` => f(x)
`f` => f()
```
Additionally, infix notation supports a lambda as the last argument, despite lambdas having a lower precedence. Thus, ``a `func` b -> c`` is equivalent to `func(a, b -> c)`.

Coconut also supports infix function definition to make defining functions that are intended for infix usage simpler. The syntax for infix function definition is
```coconut
Expand Down
16 changes: 8 additions & 8 deletions coconut/compiler/compiler.py
Expand Up @@ -490,7 +490,7 @@ def bind(self):
self.decorators <<= attach(self.decorators_ref, self.decorators_handle)
self.unsafe_typedef_or_expr <<= attach(self.unsafe_typedef_or_expr_ref, self.unsafe_typedef_or_expr_handle)
self.testlist_star_expr <<= attach(self.testlist_star_expr_ref, self.testlist_star_expr_handle)
self.list_literal <<= attach(self.list_literal_ref, self.list_literal_handle)
self.list_expr <<= attach(self.list_expr_ref, self.list_expr_handle)
self.dict_literal <<= attach(self.dict_literal_ref, self.dict_literal_handle)
self.return_testlist <<= attach(self.return_testlist_ref, self.return_testlist_handle)
self.anon_namedtuple <<= attach(self.anon_namedtuple_ref, self.anon_namedtuple_handle)
Expand Down Expand Up @@ -2977,10 +2977,10 @@ def split_star_expr_tokens(self, tokens):
groups.pop()
return groups, has_star, has_comma

def testlist_star_expr_handle(self, original, loc, tokens, list_literal=False):
def testlist_star_expr_handle(self, original, loc, tokens, is_list=False):
"""Handle naked a, *b."""
groups, has_star, has_comma = self.split_star_expr_tokens(tokens)
is_sequence = has_comma or list_literal
is_sequence = has_comma or is_list

if not is_sequence:
if has_star:
Expand Down Expand Up @@ -3011,20 +3011,20 @@ def testlist_star_expr_handle(self, original, loc, tokens, list_literal=False):
else:
to_chain.append(g)

# return immediately, since we handle list_literal here
if list_literal:
# return immediately, since we handle is_list here
if is_list:
return "_coconut.list(_coconut.itertools.chain(" + ", ".join(to_chain) + "))"
else:
return "_coconut.tuple(_coconut.itertools.chain(" + ", ".join(to_chain) + "))"

if list_literal:
if is_list:
return "[" + out + "]"
else:
return out # the grammar wraps this in parens as needed

def list_literal_handle(self, original, loc, tokens):
def list_expr_handle(self, original, loc, tokens):
"""Handle non-comprehension list literals."""
return self.testlist_star_expr_handle(original, loc, tokens, list_literal=True)
return self.testlist_star_expr_handle(original, loc, tokens, is_list=True)

def dict_literal_handle(self, original, loc, tokens):
"""Handle {**d1, **d2}."""
Expand Down
60 changes: 57 additions & 3 deletions coconut/compiler/grammar.py
Expand Up @@ -27,6 +27,9 @@

from coconut.root import * # NOQA

from collections import defaultdict
from itertools import islice

from coconut._pyparsing import (
CaselessLiteral,
Forward,
Expand Down Expand Up @@ -166,6 +169,12 @@ def add_parens_handle(tokens):
return "(" + item + ")"


def add_bracks_handle(tokens):
    """Wrap the single parsed item in square brackets."""
    (item,) = tokens
    return "[{}]".format(item)

def strip_parens_handle(tokens):
"""Strip parentheses."""
item, = tokens
Expand Down Expand Up @@ -515,6 +524,39 @@ def partial_op_item_handle(tokens):
raise CoconutInternalException("invalid operator function implicit partial token group", tok_grp)


def array_literal_handle(tokens):
    """Handle multidimensional array literals."""
    internal_assert(len(tokens) >= 2, "invalid array literal arguments", tokens)

    # find highest-level array literal separators
    # (elements sit at even indices, ";"-run separators at odd ones;
    # the run length of a separator is its dimension level)
    sep_indices_by_level = defaultdict(list)
    for ind in range(1, len(tokens), 2):
        sep = tokens[ind]
        internal_assert(sep.lstrip(";") == "", "invalid array literal separator", sep)
        sep_indices_by_level[len(sep)].append(ind)

    # split by highest-level separators
    sep_level = max(sep_indices_by_level)
    pieces = []
    prev_ind = 0
    for sep_ind in sep_indices_by_level[sep_level]:
        pieces.append(tokens[prev_ind:sep_ind])
        prev_ind = sep_ind + 1
    pieces.append(tokens[prev_ind:])

    # build multidimensional array
    array_elems = []
    for piece in pieces:
        if not piece:
            continue
        if len(piece) > 1:
            # piece still contains lower-level separators; recurse
            inner_code = array_literal_handle(piece)
        else:
            inner_code = piece[0]
        array_elems.append(
            "_coconut_lift_arr(" + inner_code + ", " + str(sep_level) + ")"
        )
    return "[" + ", ".join(array_elems) + "]"


# end: HANDLERS
# -----------------------------------------------------------------------------------------------------------------------
# MAIN GRAMMAR:
Expand All @@ -535,6 +577,7 @@ class Grammar(object):
unsafe_colon = Literal(":")
colon = ~unsafe_dubcolon + ~colon_eq + unsafe_colon
semicolon = Literal(";") | invalid_syntax("\u037e", "invalid Greek question mark instead of semicolon", greedy=True)
multisemicolon = combine(OneOrMore(semicolon))
eq = Literal("==")
equals = ~eq + Literal("=")
lbrack = Literal("[")
Expand Down Expand Up @@ -993,11 +1036,22 @@ class Grammar(object):
),
)

list_literal = Forward()
list_literal_ref = lbrack.suppress() + testlist_star_namedexpr_tokens + rbrack.suppress()
list_expr = Forward()
list_expr_ref = testlist_star_namedexpr_tokens
array_literal = attach(
lbrack.suppress() + tokenlist(
attach(comprehension_expr, add_bracks_handle)
| namedexpr_test + ~comma
| list_expr,
multisemicolon,
suppress=False,
) + rbrack.suppress(),
array_literal_handle,
)
list_item = (
condense(lbrack + Optional(comprehension_expr) + rbrack)
| list_literal
| lbrack.suppress() + list_expr + rbrack.suppress()
| array_literal
)

keyword_atom = any_keyword_in(const_vars)
Expand Down
2 changes: 1 addition & 1 deletion coconut/compiler/header.py
Expand Up @@ -347,7 +347,7 @@ def pattern_prepender(func):
format_dict.update(
dict(
# when anything is added to this list it must also be added to *both* __coconut__.pyi stub files
underscore_imports="{tco_comma}{call_set_names_comma}{handle_cls_args_comma}_namedtuple_of, _coconut, _coconut_MatchError, _coconut_iter_getitem, _coconut_base_compose, _coconut_forward_compose, _coconut_back_compose, _coconut_forward_star_compose, _coconut_back_star_compose, _coconut_forward_dubstar_compose, _coconut_back_dubstar_compose, _coconut_pipe, _coconut_star_pipe, _coconut_dubstar_pipe, _coconut_back_pipe, _coconut_back_star_pipe, _coconut_back_dubstar_pipe, _coconut_none_pipe, _coconut_none_star_pipe, _coconut_none_dubstar_pipe, _coconut_bool_and, _coconut_bool_or, _coconut_none_coalesce, _coconut_minus, _coconut_map, _coconut_partial, _coconut_get_function_match_error, _coconut_base_pattern_func, _coconut_addpattern, _coconut_sentinel, _coconut_assert, _coconut_mark_as_match, _coconut_reiterable, _coconut_self_match_types, _coconut_dict_merge, _coconut_exec, _coconut_comma_op".format(**format_dict),
underscore_imports="{tco_comma}{call_set_names_comma}{handle_cls_args_comma}_namedtuple_of, _coconut, _coconut_MatchError, _coconut_iter_getitem, _coconut_base_compose, _coconut_forward_compose, _coconut_back_compose, _coconut_forward_star_compose, _coconut_back_star_compose, _coconut_forward_dubstar_compose, _coconut_back_dubstar_compose, _coconut_pipe, _coconut_star_pipe, _coconut_dubstar_pipe, _coconut_back_pipe, _coconut_back_star_pipe, _coconut_back_dubstar_pipe, _coconut_none_pipe, _coconut_none_star_pipe, _coconut_none_dubstar_pipe, _coconut_bool_and, _coconut_bool_or, _coconut_none_coalesce, _coconut_minus, _coconut_map, _coconut_partial, _coconut_get_function_match_error, _coconut_base_pattern_func, _coconut_addpattern, _coconut_sentinel, _coconut_assert, _coconut_mark_as_match, _coconut_reiterable, _coconut_self_match_types, _coconut_dict_merge, _coconut_exec, _coconut_comma_op, _coconut_lift_arr".format(**format_dict),
import_typing_NamedTuple=pycondition(
(3, 6),
if_lt='''
Expand Down
32 changes: 30 additions & 2 deletions coconut/compiler/templates/header.py_template
@@ -1,6 +1,13 @@
class _coconut{object}:{COMMENT.EVERYTHING_HERE_MUST_BE_COPIED_TO_STUB_FILE}
import collections, copy, functools, types, itertools, operator, threading, os, warnings, contextlib, traceback, weakref, multiprocessing, math
from multiprocessing import dummy as multiprocessing_dummy
try:
import numpy
except ImportError:
class you_need_to_install_numpy{object}: pass
numpy = you_need_to_install_numpy()
else:
collections.abc.Sequence.register(numpy.ndarray)
{maybe_bind_lru_cache}{import_asyncio}
{import_pickle}
{import_OrderedDict}
Expand Down Expand Up @@ -896,8 +903,7 @@ def fmap(func, obj):
if result is not _coconut.NotImplemented:
return result
if obj.__class__.__module__ in ("numpy", "pandas"):
from numpy import vectorize
return vectorize(func)(obj)
return _coconut.numpy.vectorize(func)(obj)
return _coconut_makedata(obj.__class__, *(_coconut_starmap(func, obj.items()) if _coconut.isinstance(obj, _coconut.abc.Mapping) else _coconut_map(func, obj)))
def memoize(maxsize=None, *args, **kwargs):
"""Decorator that memoizes a function, preventing it from being recomputed
Expand Down Expand Up @@ -1086,5 +1092,27 @@ def collectby(key_func, iterable, value_func=None, reduce_func=None):
def _namedtuple_of(**kwargs):
"""Construct an anonymous namedtuple of the given keyword arguments."""
{namedtuple_of_implementation}
def _coconut_lift_arr(arr, level):
    """Lift arr to at least the given nesting level by wrapping it in lists.

    Used by compiled multidimensional array literals: each element joined by
    a level-n separator must be at least n-dimensional before concatenation.
    Never unwraps: arr that is already deep enough is returned unchanged.
    """
    if not level:
        return arr
    elif not _coconut.isinstance(arr, _coconut.abc.Sequence):
        # a scalar has depth 0, so it needs the full number of wraps
        for _ in _coconut.range(level):
            arr = [arr]
        return arr
    elif _coconut.len(arr) == 0:
        # an empty sequence already counts as one dimension
        for _ in _coconut.range(level - 1):
            arr = [arr]
        return arr
    else:
        # measure the existing nesting depth by walking first elements
        arr_level = 1
        inner_arr = arr[0]
        while _coconut.isinstance(inner_arr, _coconut.abc.Sequence):
            arr_level += 1
            # use _coconut.len, not bare len, for consistency with the rest
            # of the header (protects against shadowed builtins)
            if _coconut.len(inner_arr) < 1:
                break
            inner_arr = inner_arr[0]
        # only add the dimensions that are still missing
        for _ in _coconut.range(level - arr_level):
            arr = [arr]
        return arr
_coconut_self_match_types = (bool, bytearray, bytes, dict, float, frozenset, int, list, set, str, tuple)
_coconut_MatchError, _coconut_count, _coconut_enumerate, _coconut_filter, _coconut_makedata, _coconut_map, _coconut_reiterable, _coconut_reversed, _coconut_starmap, _coconut_tee, _coconut_zip, TYPE_CHECKING, reduce, takewhile, dropwhile = MatchError, count, enumerate, filter, makedata, map, reiterable, reversed, starmap, tee, zip, False, _coconut.functools.reduce, _coconut.itertools.takewhile, _coconut.itertools.dropwhile
2 changes: 1 addition & 1 deletion coconut/root.py
Expand Up @@ -26,7 +26,7 @@
VERSION = "2.0.0"
VERSION_NAME = "How Not to Be Seen"
# False for release, int >= 1 for develop
DEVELOP = 16
DEVELOP = 17
ALPHA = True

# -----------------------------------------------------------------------------------------------------------------------
Expand Down
27 changes: 26 additions & 1 deletion coconut/stubs/__coconut__.pyi
Expand Up @@ -148,7 +148,16 @@ else:
from itertools import izip_longest as _zip_longest


try:
import numpy as _numpy
except ImportError:
_numpy = ...
else:
_abc.Sequence.register(_numpy.ndarray)


class _coconut:
typing = _t # The real _coconut doesn't import typing, but we want type-checkers to treat it as if it does
collections = _collections
copy = _copy
functools = _functools
Expand All @@ -166,7 +175,7 @@ class _coconut:
abc = _abc
multiprocessing = _multiprocessing
multiprocessing_dummy = _multiprocessing_dummy
typing = _t # The real _coconut doesn't import typing, but we want type-checkers to treat it as if it does
numpy = _numpy
if sys.version_info >= (2, 7):
OrderedDict = staticmethod(collections.OrderedDict)
else:
Expand Down Expand Up @@ -801,3 +810,19 @@ def collectby(
def _namedtuple_of(**kwargs: _t.Dict[_t.Text, _T]) -> _t.Tuple[_T, ...]: ...
@_t.overload
def _namedtuple_of(**kwargs: _t.Dict[_t.Text, _t.Any]) -> _Tuple: ...


# Type stubs for the compiled-code helper behind multidimensional array
# literals: lifting arr to level N yields a sequence nested (at least) N deep.
# Only levels 1 and 2 get precise overloads; anything else falls through to
# the _t.Any catch-all at the bottom.
@_t.overload
def _coconut_lift_arr(arr: _t.Sequence[_T], level: _t.Literal[1]) -> _t.Sequence[_T]: ...
@_t.overload
def _coconut_lift_arr(arr: _T, level: _t.Literal[1]) -> _t.Sequence[_T]: ...

@_t.overload
def _coconut_lift_arr(arr: _t.Sequence[_t.Sequence[_T]], level: _t.Literal[2]) -> _t.Sequence[_t.Sequence[_T]]: ...
@_t.overload
def _coconut_lift_arr(arr: _t.Sequence[_T], level: _t.Literal[2]) -> _t.Sequence[_t.Sequence[_T]]: ...
@_t.overload
def _coconut_lift_arr(arr: _T, level: _t.Literal[2]) -> _t.Sequence[_t.Sequence[_T]]: ...

# fallback for deeper or dynamically-computed levels
@_t.overload
def _coconut_lift_arr(arr: _t.Any, level: int) -> _t.Sequence[_t.Any]: ...
2 changes: 1 addition & 1 deletion coconut/stubs/coconut/__coconut__.pyi
@@ -1,2 +1,2 @@
from __coconut__ import *
from __coconut__ import _coconut_tail_call, _coconut_tco, _coconut_call_set_names, _coconut_handle_cls_kwargs, _coconut_handle_cls_stargs, _namedtuple_of, _coconut, _coconut_MatchError, _coconut_iter_getitem, _coconut_base_compose, _coconut_forward_compose, _coconut_back_compose, _coconut_forward_star_compose, _coconut_back_star_compose, _coconut_forward_dubstar_compose, _coconut_back_dubstar_compose, _coconut_pipe, _coconut_star_pipe, _coconut_dubstar_pipe, _coconut_back_pipe, _coconut_back_star_pipe, _coconut_back_dubstar_pipe, _coconut_none_pipe, _coconut_none_star_pipe, _coconut_none_dubstar_pipe, _coconut_bool_and, _coconut_bool_or, _coconut_none_coalesce, _coconut_minus, _coconut_map, _coconut_partial, _coconut_get_function_match_error, _coconut_base_pattern_func, _coconut_addpattern, _coconut_sentinel, _coconut_assert, _coconut_mark_as_match, _coconut_reiterable, _coconut_self_match_types, _coconut_dict_merge, _coconut_exec, _coconut_comma_op
from __coconut__ import _coconut_tail_call, _coconut_tco, _coconut_call_set_names, _coconut_handle_cls_kwargs, _coconut_handle_cls_stargs, _namedtuple_of, _coconut, _coconut_MatchError, _coconut_iter_getitem, _coconut_base_compose, _coconut_forward_compose, _coconut_back_compose, _coconut_forward_star_compose, _coconut_back_star_compose, _coconut_forward_dubstar_compose, _coconut_back_dubstar_compose, _coconut_pipe, _coconut_star_pipe, _coconut_dubstar_pipe, _coconut_back_pipe, _coconut_back_star_pipe, _coconut_back_dubstar_pipe, _coconut_none_pipe, _coconut_none_star_pipe, _coconut_none_dubstar_pipe, _coconut_bool_and, _coconut_bool_or, _coconut_none_coalesce, _coconut_minus, _coconut_map, _coconut_partial, _coconut_get_function_match_error, _coconut_base_pattern_func, _coconut_addpattern, _coconut_sentinel, _coconut_assert, _coconut_mark_as_match, _coconut_reiterable, _coconut_self_match_types, _coconut_dict_merge, _coconut_exec, _coconut_comma_op, _coconut_lift_arr
15 changes: 15 additions & 0 deletions tests/src/cocotest/agnostic/main.coco
Expand Up @@ -972,6 +972,21 @@ def main_test() -> bool:
@(def f -> f)
def ret1() = 1
assert ret1() == 1
assert (.,2)(1) == (1, 2) == (1,.)(2)
assert [1;] == [[1]] == [[1];]
assert [1;;] == [[[1]]] == [[1];;]
assert [[[1]];;] == [[[1]]] == [[1;];;]
assert [1;2] == [[1], [2]] == [1;2;] == [[1];[2]]
assert [1, 2; 3, 4] == [[1, 2], [3, 4]] == [[1,2]; [3,4];]
assert [
1; 2;;
3; 4;;
] == [[[1], [2]], [[3], [4]]] == [
[1; 2];;
[3; 4];;
]
assert [1, 2 ; 3, 4 ;; 5, 6 ; 7, 8] == [[[1, 2], [3, 4]], [[5, 6], [7, 8]]] == [1, 2 ; 3, 4 ;; 5, 6 ; 7, 8 ;]
assert [range(3) |> list ; x+1 for x in range(3)] == [[0, 1, 2], [1, 2, 3]] == [range(3) |> list ; x+1 for x in range(3) ;]
return True

def test_asyncio() -> bool:
Expand Down
32 changes: 26 additions & 6 deletions tests/src/extras.coco
@@ -1,5 +1,6 @@
from coconut.__coconut__ import consume as coc_consume # type: ignore
from collections.abc import Sequence

from coconut.__coconut__ import consume as coc_consume # type: ignore
from coconut.constants import (
IPY,
PY2,
Expand Down Expand Up @@ -195,16 +196,35 @@ def test_extras():
assert "map" in keyword_complete_result["matches"]
assert keyword_complete_result["cursor_start"] == 0
assert keyword_complete_result["cursor_end"] == 1
if not PYPY and (PY2 or PY34):
import numpy as np
assert np.all(fmap(-> _ + 1, np.arange(3)) == np.array([1, 2, 3]))
print("<success>")
return True


def test_numpy():
    # requires numpy to be installed; callers should guard on availability
    import numpy as np
    # fmap on an ndarray goes through numpy.vectorize and preserves the type
    assert isinstance(np.array([1, 2]) |> fmap$(.+1), np.ndarray)
    assert np.all(fmap(-> _ + 1, np.arange(3)) == np.array([1, 2, 3]))
    # ";" inside [...] is the array-literal separator: a run of n semicolons
    # concatenates along the nth dimension, so [1, 2; 3, 4] is 2x2
    assert np.array([1, 2; 3, 4]).shape == (2, 2)
    # existing arrays are lifted, not re-wrapped, before concatenation
    assert np.array([
        1, 2;
        np.array([3, 4]);
    ]).shape == (2, 2)
    # ";;" stacks 2-D arrays into a third dimension
    assert np.array([
        np.array([1, 2; 3, 4]) ;;
        np.array([5, 6; 7, 8]) ;;
    ]).shape == (2, 2, 2)
    # ndarray is registered as a collections.abc.Sequence by the header...
    assert np.array([1, 2]) `isinstance` Sequence
    # ...which also enables sequence destructuring of arrays
    [1, two] = np.array([1, 2])
    assert two == 2
    return True


def main():
if not PYPY and (PY2 or PY34):
assert test_numpy()
print("Expect Coconut errors below from running extras:")
print("(but make sure you get a <success> after them)")
test_extras()
assert test_extras()
print("<success>")
return True


Expand Down

0 comments on commit ad21951

Please sign in to comment.