Example #1
import networkx
import pandas
from gem.embedding.lle import LocallyLinearEmbedding

def main(data_set_name):
    dimensions = 1
    input_file = './graph/' + data_set_name + '.tsv'
    output_file = '../data/' + data_set_name + '.emb'
    # Instantiate the embedding method with its hyperparameters
    lle = LocallyLinearEmbedding(dimensions)

    # Load graph
    # graph = graph_util.loadGraphFromEdgeListTxt(input_file)
    graph = networkx.read_weighted_edgelist(input_file)

    # Learn embedding - accepts a networkx graph or file with edge list
    embeddings_array, t = lle.learn_embedding(graph, edge_f=None, is_weighted=True, no_python=True)
    embeddings = pandas.DataFrame(embeddings_array)
    embeddings.to_csv(output_file, sep=' ', na_rep=0.1)
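A hypothetical invocation of this entry point, assuming the ./graph/ and ../data/ directories already exist; the 'toy' dataset name and its edge weights below are illustrative, not from the original:

# Write a tiny weighted edge list, then embed it (illustrative data).
with open('./graph/toy.tsv', 'w') as f:
    f.write('0\t1\t1.0\n1\t2\t0.5\n2\t0\t2.0\n')

main('toy')  # writes one-dimensional node embeddings to ../data/toy.emb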
Example #2
import ast

import numpy as np

from gem.embedding.gf import GraphFactorization
from gem.embedding.hope import HOPE
from gem.embedding.lap import LaplacianEigenmaps
from gem.embedding.lle import LocallyLinearEmbedding
from gem.embedding.sdne import SDNE
from gem.utils import graph_util


def main(args):

    # Load edgelist
    G = graph_util.loadGraphFromEdgeListTxt(args.input, directed=args.directed)
    G = G.to_directed()

    # Preprocess the graph
    # G, _ = prep_graph(G)

    if args.method == 'gf':
        # GF takes embedding dimension (d), maximum iterations (max_iter), learning rate (eta),
        # regularization coefficient (regu) as inputs
        model = GraphFactorization(d=args.dimension,
                                   max_iter=args.max_iter,
                                   eta=args.eta,
                                   regu=args.regu)
    elif args.method == 'hope':
        # HOPE takes embedding dimension (d) and decay factor (beta) as inputs
        model = HOPE(d=args.dimension, beta=args.beta)
    elif args.method == 'lap':
        # LE takes embedding dimension (d) as input
        model = LaplacianEigenmaps(d=args.dimension)
    elif args.method == 'lle':
        # LLE takes embedding dimension (d) as input
        model = LocallyLinearEmbedding(d=args.dimension)
    elif args.method == 'sdne':
        encoder_layer_list = ast.literal_eval(args.encoder_list)
        # SDNE takes embedding dimension (d), seen edge reconstruction weight (beta), first order proximity weight
        # (alpha), lasso regularization coefficient (nu1), ridge regression coefficient (nu2), number of hidden layers
        # (K), size of each layer (n_units), number of iterations (n_iter), learning rate (xeta), batch size (n_batch),
        # and save locations for the model and weight files (modelfile and weightfile) as inputs
        model = SDNE(d=args.dimension,
                     beta=args.beta,
                     alpha=args.alpha,
                     nu1=args.nu1,
                     nu2=args.nu2,
                     K=len(encoder_layer_list),
                     n_units=encoder_layer_list,
                     n_iter=args.max_iter,
                     xeta=args.learning_rate,
                     n_batch=args.bs)
        # , modelfile=['enc_model.json', 'dec_model.json'], weightfile=['enc_weights.hdf5', 'dec_weights.hdf5'])
    else:
        raise ValueError('The requested method does not exist!')

    # Learn the node embeddings
    Y, t = model.learn_embedding(graph=G,
                                 edge_f=None,
                                 is_weighted=args.weighted,
                                 no_python=True)
    Z = np.real_if_close(Y, tol=1000)

    # Save the node embeddings to a file
    np.savetxt(args.output, Z, delimiter=',', fmt='%f')
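This main expects an argparse-style namespace. Below is a hypothetical CLI wrapper inferred from the attributes the function reads; each flag name mirrors an args.<name> access above, while the defaults are illustrative assumptions, not the original script's:

# Hypothetical argument parser for the main() above; defaults are assumptions.
import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Learn node embeddings with GEM')
    parser.add_argument('--input', required=True, help='path to the input edge list')
    parser.add_argument('--output', required=True, help='path for the output embedding file')
    parser.add_argument('--method', default='hope', choices=['gf', 'hope', 'lap', 'lle', 'sdne'])
    parser.add_argument('--directed', action='store_true')
    parser.add_argument('--weighted', action='store_true')
    parser.add_argument('--dimension', type=int, default=128)
    parser.add_argument('--max_iter', type=int, default=500)
    parser.add_argument('--eta', type=float, default=1e-4)       # GF learning rate
    parser.add_argument('--regu', type=float, default=1.0)       # GF regularization
    parser.add_argument('--beta', type=float, default=0.01)      # HOPE decay / SDNE weight
    parser.add_argument('--alpha', type=float, default=1e-5)     # SDNE first-order weight
    parser.add_argument('--nu1', type=float, default=1e-6)       # SDNE lasso coefficient
    parser.add_argument('--nu2', type=float, default=1e-6)       # SDNE ridge coefficient
    parser.add_argument('--encoder_list', default='[500, 128]')  # parsed with ast.literal_eval
    parser.add_argument('--learning_rate', type=float, default=0.01)
    parser.add_argument('--bs', type=int, default=500)           # SDNE batch size
    main(parser.parse_args())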
Example #3
    def __init__(self, dim=4, models=None):
        # Initialize the set of possible models. A mutable default argument
        # (models=[]) is avoided here, since the shared default list would
        # accumulate models across instances.
        # See "Graph Embedding Techniques, Applications, and Performance: A Survey"
        # by Goyal and Ferrara (2017) for a taxonomy of graph embedding methods.

        if not models:  # if no models are specified, create some default ones
            models = []
            # Presently all methods are "factorization based methods"
            # first method very expensive, unless C++ version installed
            # models.append(GraphFactorization(d=2, max_iter=100000, eta=1*10**-4, regu=1.0))
            models.append(HOPE(d=dim, beta=0.01))
            models.append(LaplacianEigenmaps(d=dim))
            models.append(LocallyLinearEmbedding(d=dim))
            # The following "random walk based" and "deep learning based" methods will be enabled in the future
            # models.append(node2vec(d=2, max_iter=1, walk_len=80, num_walks=10, con_size=10, ret_p=1, inout_p=1))
            # models.append(SDNE(d=2, beta=5, alpha=1e-5, nu1=1e-6, nu2=1e-6, K=3,n_units=[50, 15,], rho=0.3, n_iter=50, xeta=0.01,n_batch=500,
            #                modelfile=['./intermediate/enc_model.json', './intermediate/dec_model.json'],
            #                weightfile=['./intermediate/enc_weights.hdf5', './intermediate/dec_weights.hdf5']))
        self.models = models

    def _get_embeddings(self, embedding_space):

        # You can comment out the methods you don't want to run
        models = list()
        for embed_method in self.embeddings:
##            if embed_method == EMEDDINGS.GRAPH_FACTORIZATIONE_MBEDDINGS:
##                models.append(GraphFactorization(embedding_space, 100000, 1 * 10 ** -4, 1.0))
            if embed_method == EMEDDINGS.LAPLACIAN_EIGENMAPS_EMBEDDINGS:
                models.append(LaplacianEigenmaps(embedding_space))
            if embed_method == EMEDDINGS.LOCALLY_LINEAR_EMBEDDING:
                models.append(LocallyLinearEmbedding(embedding_space))
            if embed_method == EMEDDINGS.HOPE_EMBEDDING:
                models.append(HOPE(2 + 1, 0.01))
            if embed_method == EMEDDINGS.NODE2VEC_EMBEDDING_EMBEDDINGS:
                models.append(node2vec(2, 1, 80, 10, 10, 1, 1))
            # Embeddings I was unable to get working yet. (HOPE seems to convert
            # k to k+1 internally, hence the "2 + 1" dimension above.)
            # if embed_method == EMEDDINGS.SDNE_EMBEDDING_EMBEDDINGS:
            #     models.append(SDNE(d=2, beta=5, alpha=1e-5, nu1=1e-6, nu2=1e-6, K=3,n_units=[50, 15,], rho=0.3, n_iter=50, xeta=0.01,n_batch=500,
            #                modelfile=[base_path + '/intermediate/enc_model.json', base_path + '/intermediate/dec_model.json'],
            #                weightfile=[base_path + '/intermediate/enc_weights.hdf5', base_path + '/intermediate/dec_weights.hdf5']))
        return models
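The EMEDDINGS enum referenced above is not shown in the original snippet. A minimal sketch of what it might look like; only the member names are taken from the code (spelling preserved exactly), and the values are illustrative assumptions:

from enum import Enum

class EMEDDINGS(Enum):
    # Hypothetical definition; member names copied verbatim from the snippet.
    GRAPH_FACTORIZATIONE_MBEDDINGS = 0
    LAPLACIAN_EIGENMAPS_EMBEDDINGS = 1
    LOCALLY_LINEAR_EMBEDDING = 2
    HOPE_EMBEDDING = 3
    NODE2VEC_EMBEDDING_EMBEDDINGS = 4
    SDNE_EMBEDDING_EMBEDDINGS = 5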
def locallyLinearEmbedding(netData, **kwargs):
    # Thin wrapper: embed netData with GEM's LLE at dimension d (default 2).
    # attMethods.GEMexport is provided elsewhere in the originating project.
    d = kwargs.get('d', 2)
    from gem.embedding.lle import LocallyLinearEmbedding
    emb = LocallyLinearEmbedding(d=d)
    return attMethods.GEMexport(netData, emb)
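All three examples follow the same GEM calling convention: instantiate a method object with its hyperparameters, then call learn_embedding. A self-contained sketch of that pattern on a toy graph, assuming (as in Example #2) that GEM expects a directed weighted networkx graph; the graph data is illustrative:

import networkx as nx
from gem.embedding.lle import LocallyLinearEmbedding

# Build a small weighted cycle in place of an edge-list file (illustrative data).
G = nx.Graph()
G.add_weighted_edges_from([(i, (i + 1) % 6, 1.0) for i in range(6)])
G = G.to_directed()

lle = LocallyLinearEmbedding(d=2)
Y, t = lle.learn_embedding(graph=G, edge_f=None, is_weighted=True, no_python=True)
print(Y.shape)  # one d-dimensional embedding row per node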