-
Notifications
You must be signed in to change notification settings - Fork 7
/
node2vec.py
48 lines (41 loc) · 1.76 KB
/
node2vec.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
from gensim.models import Word2Vec
import walker
import time
import numpy as np
class Node2vec(object):
    """Learn node embeddings by simulating biased random walks over a graph
    and training gensim's skip-gram Word2Vec on them, per the node2vec
    algorithm (Grover & Leskovec, 2016). With ``dw=True`` it degrades to
    plain DeepWalk (uniform walks, hierarchical softmax).
    """

    def __init__(self, graph, path_length, num_paths, dim, p=1.0, q=1.0, dw=False, **kwargs):
        """Train embeddings for every node of ``graph``.

        Args:
            graph: project graph wrapper; must expose ``G`` (networkx-style
                graph), ``node_size``, and ``look_up_dict`` (node -> row index).
            path_length: length of each random walk.
            num_paths: number of walks started from every node.
            dim: embedding dimensionality (default for Word2Vec ``size``).
            p: node2vec return parameter (ignored when ``dw`` is True).
            q: node2vec in-out parameter (ignored when ``dw`` is True).
            dw: if True, run DeepWalk instead of node2vec.
            **kwargs: forwarded to gensim ``Word2Vec`` (e.g. workers, window).

        Side effects: populates ``self.vectors`` (node -> vector) and
        ``self.embeddings`` (node_size x dim array), and prints progress.
        """
        kwargs["workers"] = kwargs.get("workers", 1)
        if dw:
            # DeepWalk mode: hierarchical softmax and unbiased (p = q = 1) walks.
            kwargs["hs"] = 1
            p = 1.0
            q = 1.0
        self.graph = graph
        if dw:
            self.walker = walker.BasicWalker(graph, workers=kwargs["workers"])
        else:
            self.walker = walker.Walker(graph, p=p, q=q, workers=kwargs["workers"])
        print("Preprocess transition probs...")
        self.walker.preprocess_transition_probs()
        sentences = self.walker.simulate_walks(num_walks=num_paths, walk_length=path_length)
        kwargs["sentences"] = sentences
        kwargs["min_count"] = kwargs.get("min_count", 0)  # keep every node, however rare
        kwargs["size"] = kwargs.get("size", dim)
        kwargs["sg"] = 1  # skip-gram, as prescribed by the node2vec paper
        self.size = kwargs["size"]
        print("Learning representation...")
        # NOTE(review): the "size" kwarg and model[word] indexing are the
        # gensim<4 API; gensim 4+ renamed them to vector_size / model.wv.
        # Left unchanged to match the gensim version this project targets.
        word2vec = Word2Vec(**kwargs)
        self.vectors = {}
        self.embeddings = np.zeros((graph.node_size, dim))
        for word in graph.G.nodes():
            self.vectors[word] = word2vec[word]
            self.embeddings[graph.look_up_dict[word]] = word2vec[word]
        # Drop the full model eagerly; only the extracted vectors are kept.
        del word2vec

    def save_embeddings(self, filename):
        """Write embeddings in word2vec text format.

        First line is "<node_count> <dim>"; each following line is
        "<node> <v1> <v2> ...".

        Fix over the original: the file is opened via a context manager so
        the handle is closed even when a write raises (the original leaked
        the descriptor on error).
        """
        with open(filename, 'w', encoding='utf-8') as fout:
            fout.write("{} {}\n".format(len(self.vectors), self.size))
            for node, vec in self.vectors.items():
                fout.write("{} {}\n".format(
                    node, ' '.join(str(x) for x in vec)))