def __init__(self, dataset, p=1, q=4, walk_length=100, num_walks=50,
             dimensions=200, window_size=30, workers=8, iterations=5):
    """Initialize node2vec embeddings for *dataset*, training them if needed.

    Builds the on-disk cache path from the hyperparameters, trains the
    embeddings once (via ``self.run``) when no cached file exists, and
    loads the result as gensim ``KeyedVectors``.

    Args:
        dataset: Name of the dataset folder under ``datasets/``.
        p: node2vec return hyperparameter.
        q: node2vec in-out hyperparameter.
        walk_length: Length of each random walk.
        num_walks: Number of walks per node.
        dimensions: Embedding dimensionality.
        window_size: Skip-gram context window size.
        workers: Number of parallel workers.
        iterations: Number of training epochs.
    """
    # Fixed graph flags for this variant: undirected, with preprocessing,
    # unweighted (False, True, False).
    Node2Vec.__init__(self, False, True, False, p, q, walk_length, num_walks,
                      dimensions, window_size, workers, iterations)
    self.dataset = dataset
    emb_dir = 'datasets/%s/node2vec/' % self.dataset
    file = 'num%d_p%d_q%d_l%d_d%d_iter%d_winsize%d.emd' % (
        num_walks, p, q, walk_length, dimensions, iterations, window_size)
    self.path = emb_dir + file
    # BUGFIX: the original tested ``file not in os.listdir(emb_dir)``, which
    # raises FileNotFoundError when the cache directory does not exist yet
    # and needlessly scans every entry; check the target file directly and
    # make sure the directory is there before training writes into it.
    os.makedirs(emb_dir, exist_ok=True)
    if not os.path.exists(self.path):
        # Train once and cache the embeddings on disk.
        self.run(emb_dir + 'altogether.edgelist', self.path)
    self.node2vec_model = KeyedVectors.load_word2vec_format(self.path, binary=True)
def __init__(self, is_directed, preprocessing, is_weighted, p, q,
             walk_length, num_walks, dimensions, window_size, workers,
             iterations, feedback_file):
    """Set up the node2vec-based component.

    Every graph and random-walk hyperparameter is forwarded unchanged to
    the ``Node2Vec`` base initializer; the only state added here is the
    location of the user-feedback file.

    Args:
        is_directed: Whether the graph edges are directed.
        preprocessing: Whether to run the preprocessing step.
        is_weighted: Whether edges carry weights.
        p: node2vec return hyperparameter.
        q: node2vec in-out hyperparameter.
        walk_length: Length of each random walk.
        num_walks: Number of walks per node.
        dimensions: Embedding dimensionality.
        window_size: Skip-gram context window size.
        workers: Number of parallel workers.
        iterations: Number of training epochs.
        feedback_file: Path to the file holding user feedback.
    """
    # Delegate all embedding configuration to the base class.
    Node2Vec.__init__(self, is_directed, preprocessing, is_weighted, p, q,
                      walk_length, num_walks, dimensions, window_size,
                      workers, iterations)
    # The base class does not know about feedback; keep the path here.
    self.feedback_file = feedback_file
def __init__(self, is_directed, preprocessing, is_weighted, p, q,
             walk_length, num_walks, dimensions, window_size, workers,
             iterations, config, sparql, dataset, entities, default_graph,
             entity_class, feedback_file):
    """Set up the SPARQL-backed node2vec component.

    Graph and walk hyperparameters go straight to the ``Node2Vec`` base
    initializer; the remaining arguments describe the knowledge-graph
    endpoint and dataset, stored on the instance before
    ``_define_properties`` finalizes the configuration.

    Args:
        is_directed: Whether the graph edges are directed.
        preprocessing: Whether to run the preprocessing step.
        is_weighted: Whether edges carry weights.
        p: node2vec return hyperparameter.
        q: node2vec in-out hyperparameter.
        walk_length: Length of each random walk.
        num_walks: Number of walks per node.
        dimensions: Embedding dimensionality.
        window_size: Skip-gram context window size.
        workers: Number of parallel workers.
        iterations: Number of training epochs.
        config: Path to the configuration file.
        sparql: SPARQL endpoint to query.
        dataset: Name of the dataset.
        entities: Entities to embed.
        default_graph: Default graph of the SPARQL endpoint.
        entity_class: Class of the entities of interest.
        feedback_file: Path to the file holding user feedback.
    """
    # Embedding hyperparameters are handled entirely by the base class.
    Node2Vec.__init__(self, is_directed, preprocessing, is_weighted, p, q,
                      walk_length, num_walks, dimensions, window_size,
                      workers, iterations)
    # Knowledge-graph / endpoint configuration kept on the instance.
    self.sparql = sparql
    self.default_graph = default_graph
    self.entity_class = entity_class
    self.config_file = config
    # Dataset-specific state.
    self.dataset = dataset
    self.entities = entities
    self.feedback_file = feedback_file
    # Must run last: derives properties from the attributes set above.
    self._define_properties()