Remove unnecessary typing dependency. (#16776)
Signed-off-by: Edward Z. Yang <ezyang@fb.com>
ezyang authored and soumith committed Feb 7, 2019
1 parent 48fcfda commit 8322165
Showing 1 changed file with 0 additions and 14 deletions.
torch/functional.py: 14 changes (0 additions, 14 deletions)
@@ -7,7 +7,6 @@
 from itertools import product
 import math
 import warnings
-from typing import Optional, Tuple, List, Union

 __all__ = [
     'argmax',
@@ -31,7 +30,6 @@
 ]


-@annotate(ret=List[torch.Tensor], tensors=torch.Tensor)
 def broadcast_tensors(*tensors):
     r"""broadcast_tensors(*tensors) -> List of Tensors
@@ -54,8 +52,6 @@ def broadcast_tensors(*tensors):
     return torch._C._VariableFunctions.broadcast_tensors(tensors)


-@annotate(ret=List[torch.Tensor], tensor=torch.Tensor,
-          split_size_or_sections=Union[List[int], int], dim=int)
 def split(tensor, split_size_or_sections, dim=0):
     r"""Splits the tensor into chunks.
@@ -81,10 +77,6 @@ def split(tensor, split_size_or_sections, dim=0):
     return tensor.split(split_size_or_sections, dim)


-@annotate(ret=Tuple[torch.Tensor, torch.Tensor],
-          A=torch.Tensor,
-          info=Optional[torch.Tensor],
-          pivot=bool)
 def btrifact(A, info=None, pivot=True):
     r"""Batch LU factorization.
@@ -132,9 +124,6 @@ def btrifact(A, info=None, pivot=True):
     return A.btrifact(info, pivot)


-@annotate(ret=Tuple[Optional[torch.Tensor], Optional[torch.Tensor], Optional[torch.Tensor]],
-          LU_data=torch.Tensor, LU_pivots=torch.Tensor,
-          unpack_data=bool, unpack_pivots=bool)
 def btriunpack(LU_data, LU_pivots, unpack_data=True, unpack_pivots=True):
     r"""Unpacks the data and pivots from a batched LU factorization (btrifact) of a tensor.
@@ -179,9 +168,6 @@ def btriunpack(LU_data, LU_pivots, unpack_data=True, unpack_pivots=True):
     return P, L, U


-@annotate(ret=torch.Tensor,
-          equation=str,
-          operands=torch.Tensor)
 def einsum(equation, *operands):
     r"""einsum(equation, *operands) -> Tensor
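The removed @annotate decorators were the only users of the typing names, so the import can go with them; the runtime behavior of the affected functions should be unchanged. A minimal usage sketch of the touched public APIs, for context only and not part of this commit (btrifact and btriunpack are omitted here, as they were deprecated in later PyTorch releases):

import torch

a = torch.randn(3, 1)
b = torch.randn(1, 4)
x, y = torch.broadcast_tensors(a, b)                    # both broadcast to shape (3, 4)

chunks = torch.split(torch.arange(10), 3)               # tensors of sizes 3, 3, 3, 1

out = torch.einsum('ij,jk->ik', x, torch.randn(4, 2))   # matrix product, shape (3, 2)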
