-
Notifications
You must be signed in to change notification settings - Fork 5
/
utility.py
135 lines (108 loc) · 4.26 KB
/
utility.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
import numpy as np
import tensorflow as tf
def pairwise_add(u, v=None, is_batch=False):
    """
    Performs a pairwise summation between vectors (possibly the same).

    Produces M with M[i, j] = u[i] + v[j] (with a leading batch dimension
    when is_batch is True).

    Parameters:
    ----------
    u: Tensor (n, ) | (n, 1)
    v: Tensor (n, ) | (n, 1) [optional]
        defaults to u itself when omitted
    is_batch: bool
        a flag for whether the vectors come in a batch
        ie.: whether the vectors has a shape of (b,n) or (b,n,1)

    Returns: Tensor (n, n)
        (b, n, n) in the batch case

    Raises: ValueError
        on a rank or shape mismatch
    """
    u_shape = u.get_shape().as_list()

    if len(u_shape) > 2 and not is_batch:
        raise ValueError("Expected at most 2D tensors, but got %dD" % len(u_shape))
    if len(u_shape) > 3 and is_batch:
        raise ValueError("Expected at most 2D tensor batches, but got %dD" % len(u_shape))

    if v is None:
        v = u
    else:
        v_shape = v.get_shape().as_list()
        if u_shape != v_shape:
            raise ValueError("Shapes %s and %s do not match" % (u_shape, v_shape))

    # NOTE(review): n must be statically known (not None) for the concat
    # replication below to work — confirm callers always have inferred shapes.
    n = u_shape[0] if not is_batch else u_shape[1]

    # Replicate u as n identical columns: U[i, j] = u[i].
    column_u = tf.reshape(u, (-1, 1) if not is_batch else (-1, n, 1))
    U = tf.concat([column_u] * n, 1 if not is_batch else 2)

    if v is u:
        # Same vector: U + U^T gives u[i] + u[j] without building row copies.
        # (tf.transpose with perm=None reverses the dims of a 2D tensor.)
        return U + tf.transpose(U, None if not is_batch else [0, 2, 1])
    else:
        # Replicate v as n identical rows: V[i, j] = v[j].
        row_v = tf.reshape(v, (1, -1) if not is_batch else (-1, 1, n))
        V = tf.concat([row_v] * n, 0 if not is_batch else 1)
        return U + V
def decaying_softmax(shape, axis):
    """
    Builds a constant array of the given shape whose entries along `axis`
    are softmax weights over a linearly decaying sequence (largest weight
    at index 0), broadcast identically across all other axes.

    Parameters:
    ----------
    shape: tuple | list of int
        the shape of the array to build
    axis: int
        the axis to fill with the decaying softmax weights

    Returns: np.ndarray (float32) of the given shape
    """
    rank = len(shape)  # num dim
    max_val = shape[axis]

    # Weights max_val, max_val-1, ..., 1 so earlier positions along `axis`
    # receive larger softmax mass.
    weights_vector = np.arange(1, max_val + 1, dtype=np.float32)
    weights_vector = weights_vector[::-1]  # reversed

    # Numerically stable softmax: subtract the max before exponentiating.
    # The naive np.exp(weights_vector) overflows float32 for max_val >~ 89,
    # producing inf/nan; shifting by the max is mathematically equivalent.
    shifted = weights_vector - np.max(weights_vector)
    exp_shifted = np.exp(shifted)
    weights_vector = exp_shifted / np.sum(exp_shifted)  # softmax weights

    container = np.zeros(shape, dtype=np.float32)
    broadcastable_shape = [1] * rank
    broadcastable_shape[axis] = max_val

    # Broadcast the weight vector along `axis` across the whole container.
    return container + np.reshape(weights_vector, broadcastable_shape)
def unpack_into_tensorarray(value, axis, size=None):
    """
    Unpacks a given tensor along a given axis into a TensorArray.

    Parameters:
    ----------
    value: Tensor
        the tensor to be unpacked
    axis: int
        the axis to unpack the tensor along
    size: int
        the size of the array to be used if shape inference resulted in None

    Returns: TensorArray
        the unpacked TensorArray

    Raises: ValueError
        if the dimension along `axis` is statically unknown and size is None
    """
    shape = value.get_shape().as_list()
    rank = len(shape)
    dtype = value.dtype
    # Prefer the statically inferred dimension; fall back to caller's size.
    array_size = shape[axis] if not shape[axis] is None else size

    if array_size is None:
        raise ValueError("Can't create TensorArray with size None")

    array = tf.TensorArray(dtype=dtype, size=array_size)  # one slot per slice along `axis` (size of the axis)
    # Permutation bringing `axis` to the front so unstack splits along it.
    # NOTE(review): for axis >= 2 this swaps dims 0 and `axis` rather than
    # rotating the rest (e.g. axis=2, rank=3 yields [2, 1, 0], not [2, 0, 1]).
    # pack_into_tensor applies the same self-inverse permutation, so
    # unpack/pack round-trips are consistent — confirm before using alone.
    dim_permutation = [axis] + list(range(1, axis)) + [0] + list(range(axis + 1, rank))
    unpack_axis_major_value = tf.transpose(value, dim_permutation)  # move axis values to the 0 dim
    full_array = array.unstack(unpack_axis_major_value)

    return full_array
def pack_into_tensor(array, axis):
    """
    Packs a given TensorArray into a tensor along a given axis.

    Inverse of unpack_into_tensorarray: stack() adds the array dimension at
    dim 0, then the same self-inverse permutation moves it back to `axis`.

    Parameters:
    ----------
    array: TensorArray
        the tensor array to pack
    axis: int
        the axis to pack the array along

    Returns: Tensor
        the packed tensor
    """
    packed_tensor = array.stack()  # add 1 dimension at the 0 dim
    shape = packed_tensor.get_shape()

    # TensorShape.ndims is None exactly when the rank could not be inferred.
    # The original wrapped len(shape) in a bare `except:`, which silently
    # swallowed *every* error (not just unknown rank); test the rank
    # explicitly and keep the original rank-3 fallback behavior.
    rank = shape.ndims
    if rank is None:
        print("unknow length of tensor array!!! assume rank 3")
        rank = 3

    # Move the stacked leading dimension back to `axis`.
    dim_permutation = [axis] + list(range(1, axis)) + [0] + list(range(axis + 1, rank))
    correct_shape_tensor = tf.transpose(packed_tensor, dim_permutation)  # put the extra dimension at the axis you want

    return correct_shape_tensor
def pack_into_tensor2(array, axis):
    """
    Packs a given TensorArray into a tensor along a given axis.

    Hard-codes a rank-3 result: stack() prepends the array dimension at
    dim 0 and the permutation relocates it to `axis`.

    Parameters:
    ----------
    array: TensorArray
        the tensor array to pack
    axis: int
        the axis to pack the array along

    Returns: Tensor
        the packed tensor
    """
    stacked = array.stack()  # elements become the new leading (0th) dimension
    # Permutation placing the leading dimension at `axis`, assuming rank 3.
    before_axis = list(range(1, axis))
    after_axis = list(range(axis + 1, 3))
    permutation = [axis] + before_axis + [0] + after_axis
    # Transpose so the stacked dimension sits where the caller asked for it.
    return tf.transpose(stacked, permutation)