-
Notifications
You must be signed in to change notification settings - Fork 11
/
graphing.py
149 lines (116 loc) · 5.5 KB
/
graphing.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
#from functools import partial
import numpy as np
import matplotlib.pyplot as plt
#import scipy
import theano
import numpy
from datasets import GaussianMixtureDistribution
def as_array(obj, dtype=theano.config.floatX):
    """Return *obj* converted to a numpy ndarray of the given dtype.

    The dtype defaults to Theano's configured float width (``floatX``),
    so arrays produced here match the precision Theano graphs expect.
    """
    converted = numpy.asarray(obj, dtype=dtype)
    return converted
def make_2D_latent_view(valid_data,
                        samples_data,
                        gradients_funs=None,
                        densities_funs=None,
                        epoch=None,
                        save_path=None):
    """Draw 2D scatter views of the visible and latent spaces.

    Produces a 2x2 grid of subplots:
      221 - visible space: originals vs. reconstructions
      222 - visible space: originals vs. model samples (shares axes with 221)
      223 - latent space: encodings colored by label
      224 - latent space: prior noise draws (shares axes with 223)

    Parameters
    ----------
    valid_data : dict of numpy arrays
        Keys read here: 'originals', 'labels', 'encodings',
        'reconstructions'. The point arrays are indexed as (N, 2);
        'labels' is used as the scatter color array.
    samples_data : dict of numpy arrays
        Keys read here: 'noise' and 'samples', each indexed as (N, 2).
    gradients_funs : dict of functions, optional
        Currently unused by this function; accepted for interface
        compatibility. (Intended: 'latent' / 'visible' gradient fns.)
    densities_funs : dict of functions, optional
        Currently unused by this function; accepted for interface
        compatibility. (Intended: 'latent' / 'visible' density fns.)
    epoch : int, optional
        Included in every subplot title.
    save_path : str, optional
        If None, the figure is shown interactively; otherwise it is
        written to this path with a transparent background.
    """
    fig = plt.figure()
    # Discrete 5-color map: one color per mixture-component label.
    cmap = plt.cm.get_cmap('Spectral', 5)
    # Visible space: originals (squares) vs. reconstructions (crosses).
    recons_visible_ax = fig.add_subplot(221, aspect='equal')
    recons_visible_ax.scatter(valid_data['originals'][:, 0],
                              valid_data['originals'][:, 1],
                              c=valid_data['labels'],
                              marker='s', label='originals',
                              alpha=0.3, cmap=cmap)
    recons_visible_ax.scatter(valid_data['reconstructions'][:, 0],
                              valid_data['reconstructions'][:, 1],
                              c=valid_data['labels'],
                              marker='x', label='reconstructions',
                              alpha=0.3,
                              cmap=cmap)
    recons_visible_ax.set_title('Visible space. Epoch {}'.format(str(epoch)))
    # Visible space: originals vs. model samples; shares limits with 221
    # so the two visible-space panels are directly comparable.
    samples_visible_ax = fig.add_subplot(222, aspect='equal',
                                         sharex=recons_visible_ax,
                                         sharey=recons_visible_ax)
    samples_visible_ax.scatter(valid_data['originals'][:, 0],
                               valid_data['originals'][:, 1],
                               c=valid_data['labels'],
                               marker='s', label='originals',
                               alpha=0.3,
                               cmap=cmap)
    samples_visible_ax.scatter(samples_data['samples'][:, 0],
                               samples_data['samples'][:, 1],
                               marker='o', alpha=0.3, label='samples')
    samples_visible_ax.set_title('Visible space. Epoch {}'.format(str(epoch)))
    # Latent space: encodings of the validation data, colored by label.
    recons_latent_ax = fig.add_subplot(223, aspect='equal')
    recons_latent_ax.scatter(valid_data['encodings'][:, 0],
                             valid_data['encodings'][:, 1],
                             c=valid_data['labels'],
                             marker='x', label='encodings',
                             alpha=0.3, cmap=cmap)
    recons_latent_ax.set_title('Latent space. Epoch {}'.format(str(epoch)))
    # Latent space: the noise fed to the decoder; shares limits with 223.
    samples_latent_ax = fig.add_subplot(224, aspect='equal',
                                        sharex=recons_latent_ax,
                                        sharey=recons_latent_ax)
    samples_latent_ax.scatter(samples_data['noise'][:, 0],
                              samples_data['noise'][:, 1],
                              marker='o', label='noise',
                              alpha=0.3)
    samples_latent_ax.set_title('Latent space. Epoch {}'.format(str(epoch)))
    plt.tight_layout()
    if save_path is None:
        plt.show()
    else:
        plt.savefig(save_path, transparent=True, bbox_inches='tight')
if __name__ == '__main__':
    # Demo: five tight 2-D Gaussian blobs at the origin and the corners
    # of the unit square.
    # BUG FIX: the original used map(), which returns a one-shot iterator
    # in Python 3 -- len(means) raised TypeError and a second pass over
    # `means` saw an exhausted iterator. A list comprehension is correct
    # under both Python 2 and 3.
    means = [as_array(m) for m in [[0, 0],
                                   [1, 1],
                                   [-1, -1],
                                   [1, -1],
                                   [-1, 1]]]
    std = 0.01
    variances = [np.eye(2) * std for _ in means]
    # Uniform mixture weights.
    priors = [1.0 / len(means) for _ in means]
    gaussian_mixture = GaussianMixtureDistribution(means=means,
                                                   variances=variances,
                                                   priors=priors)
    originals, labels = gaussian_mixture.sample(1000)
    # NOTE(review): multiplying by zero-mean noise collapses the points
    # toward the origin; an additive perturbation (originals + noise)
    # may have been intended -- confirm before changing.
    reconstructions = originals * np.random.normal(size=originals.shape,
                                                   scale=0.05)
    encodings = np.random.normal(size=(1000, 2))
    train_data = {'originals': originals, 'labels': labels,
                  'encodings': encodings,
                  'reconstructions': reconstructions}
    valid_data = train_data
    noise = np.random.normal(size=(1000, 2))
    samples = np.random.normal(size=(1000, 2), scale=0.3)
    samples_data = {'noise': noise,
                    'samples': samples}
    # BUG FIX: the original call passed (train_data, valid_data,
    # samples_data), binding valid_data to the samples_data parameter and
    # raising KeyError('samples'). The function signature is
    # (valid_data, samples_data, ...).
    make_2D_latent_view(valid_data, samples_data)
    #make_assignement_plots(valid_data)