def test_add_vertex(self):
    G = mygraph.Graph()
    G.add_vertex('A')
    G.add_vertex('A')  # adding the same vertex twice must not double-count it
    self.assertEqual(G.V(), 1)
    G.add_vertex('B')
    self.assertEqual(G.V(), 2)
    self.assertEqual(G.degree('A'), 0)
    self.assertEqual(G.degree('B'), 0)
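The test above only pins down the observable behaviour of add_vertex, V(), and degree(). A minimal sketch of a Graph class that would pass it (the adjacency-dict layout is an assumption for illustration, not mygraph's actual internals):

class Graph:
    """Minimal sketch: each vertex maps to a dict of weighted neighbours (assumed layout)."""

    def __init__(self):
        self._adj = {}              # vertex -> {neighbour: weight}

    def add_vertex(self, v):
        # adding an existing vertex is a no-op, matching the duplicate-add test
        self._adj.setdefault(v, {})

    def V(self):
        return len(self._adj)       # vertex count

    def degree(self, v):
        return len(self._adj[v])    # number of incident edges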
Example #2
import math


def load_adjlist(fn, node_type='string', weight_type='float'):
    """
    Loads an undirected graph from an adjacency-list file: each line names a
    node followed by alternating neighbour/weight fields. Listing the same edge
    twice in one direction raises an error, and the reverse direction must
    carry a consistent weight.
    :param fn: path to the adjacency-list file
    :param node_type: type name used to parse node labels
    :param weight_type: type name used to parse edge weights
    :return: the populated mygraph.Graph
    """
    py_node_type = type2python(node_type)
    py_weight_type = type2python(weight_type)

    edgeset = set()  # edges seen in only one direction so far
    g = mygraph.Graph(node_type, weight_type)
    for line in open(fn, 'r'):
        fields = line.split()

        n = py_node_type(fields[0])
        if not g.exists(n):
            g.add_vertex(n)

        for v, w in zip(fields[1::2], fields[2::2]):
            v = py_node_type(v)
            w = py_weight_type(w)

            if v == n:
                print("[warning] loopback edge ({}, {}) detected".format(v, n))
                continue

            if not g.exists(v):
                g.add_vertex(v)
            
            if g.exists(n, v):
                raise RuntimeError("Multiple edges ({}, {}) found in {}".format(n, v, fn))

            if g.exists(v, n):  # check if the graph is undirected
                assert math.fabs(w - g.edge(v, n)) < 1e-6, \
                    "Inconsistent edge weight on ({}, {}), the graph is not undirected?" \
                    .format(v, n)
                edgeset.remove((v, n))
            else:
                edgeset.add((n, v))

            g.inc_edge(n, v, w)
    if len(edgeset) > 0:
        raise RuntimeError("One-sided edges detected".format(edgeset))
    return g
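For reference, the loader expects one node per line followed by alternating neighbour and weight fields. A hypothetical input file and call (the file name and labels are invented for illustration):

# toy.adjlist -- format: node  neighbour weight  [neighbour weight ...]
#   A  B 1.0  C 2.5
#   B  A 1.0
#   C  A 2.5
g = load_adjlist('toy.adjlist', node_type='string', weight_type='float')
print(g.V(), g.E())  # assumes the same vertex/edge counters as the tests above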
Example #3
def graphtool2mygraph(g, **_):
    names = g.vp.get('name')
    if names:
        try:
            name_type = mgutils.format_type(names.value_type())
        except ValueError as e:
            print("Auto resolving type alias failed, try resolving with graph tool type system: " + e.message)
            name_type = mgutils.python2type(gtutils.type2python(names.value_type()))
    else:
        names = range(g.num_vertices())
        name_type = 'int'

    weight = g.ep.get('weight')
    if weight:
        try:
            weight_type = mgutils.format_type(weight.value_type())
        except ValueError as e:
            print("Auto resolving type alias failed, try resolving with graph tool type system: " + e.message)
            weight_type = mgutils.python2type(gtutils.type2python(weight.value_type()))
    else:
        weight = utils.ConstantDict(1.0)
        weight_type = 'float'

    names = list(names)  # get rid of sluggish gt.vertex_properties

    mg = mygraph.Graph(name_type, weight_type)

    for n in names:
        mg.add_vertex(n)

    for e in g.edges():
        n1, n2 = names[int(e.source())], names[int(e.target())]
        # n1, n2 = names[e.source()], names[e.target()]
        if g.is_directed():
            mg.inc_edge(n1, n2, weight[e])
        else:
            mg.inc_edge(n1, n2, weight[e])
            mg.inc_edge(n2, n1, weight[e])

    return mg
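A hedged usage sketch for the converter: build a tiny graph-tool graph carrying 'name' and 'weight' property maps and hand it over (assumes graph_tool is installed and the helper modules above are importable):

import graph_tool.all as gt

gtg = gt.Graph(directed=False)
v0, v1 = gtg.add_vertex(), gtg.add_vertex()

names = gtg.new_vertex_property("string")
names[v0], names[v1] = "A", "B"
gtg.vertex_properties["name"] = names

weights = gtg.new_edge_property("double")
weights[gtg.add_edge(v0, v1)] = 2.5
gtg.edge_properties["weight"] = weights

mg = graphtool2mygraph(gtg)  # the undirected edge becomes two inc_edge calls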
    def test_add_edge(self):
        G = mygraph.Graph()
        G.add_vertex('A')
        G.add_edge('A', 'B', 10)
        self.assertEqual(G.V(), 2)
        self.assertEqual(G.E(), 1)
        self.assertEqual(G.degree('A'), 1)

        G.add_edge('B', 'A', 20)
        self.assertEqual(G.V(), 2)
        self.assertEqual(G.E(), 1)
        self.assertEqual(G.degree('A'), 1)

        G.add_edge('A', 'C', 20)
        self.assertEqual(G.V(), 3)
        self.assertEqual(G.E(), 2)
        self.assertEqual(G.degree('A'), 2)

        G.add_edge('B', 'C', 20)
        self.assertEqual(G.V(), 3)
        self.assertEqual(G.E(), 3)
        self.assertEqual(G.degree('B'), 2)
        self.assertEqual(G.degree('C'), 2)
    def test_load_input(self):
        graph = {'A': {'B': 1}, 'B': {'A': 1}}
        G = mygraph.Graph(graph)

        self.assertEqual(G.V(), 2)
        self.assertEqual(G.E(), 1)
def printarr(s, p, d):
    # walk the parent array back from destination d to source s
    s = int(s)
    while d != s:
        print("%d <- %d" % (d, p[d - 1]))
        d = p[d - 1]


"""mg=mygraph.Graph()
data=mg
data.add_edge(mygraph.Edge(1,2,4))
data.add_edge(mygraph.Edge(1,3,3))
data.add_edge(mygraph.Edge(2,3,2))
data.add_edge(mygraph.Edge(2,4,1))
data.add_edge(mygraph.Edge(3,4,5))
par=bucket_dijkstra(data,1,5)
print(par)
printarr(1,par,4)"""
with open('test-3.txt', 'r') as f:
    mydata = f.readlines()
g = mygraph.Graph()
max_edge_weight = 0
for line in mydata[2:-2]:
    a, b, c = line.split()
    w = int(c)
    g.add_edge(mygraph.Edge(int(a), int(b), w))
    if max_edge_weight < w:
        max_edge_weight = w
s = int(mydata[-2])
par = bucket_dijkstra(g, s, max_edge_weight)
print(par)
d = int(mydata[-1])
printarr(s, par, d)
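bucket_dijkstra itself is not defined in this snippet. Judging from the calls above, it takes a graph, a source vertex, and the maximum edge weight, and returns a parent array that printarr indexes as p[d - 1]. A self-contained sketch of a bucket-based (Dial's) Dijkstra over a plain edge list, written under those interface assumptions rather than against mygraph:

def bucket_dijkstra_sketch(edges, source, max_weight, num_vertices):
    """Dial's algorithm: vertices wait in buckets indexed by tentative distance."""
    INF = float('inf')
    adj = {v: [] for v in range(1, num_vertices + 1)}
    for a, b, w in edges:                      # undirected edge list
        adj[a].append((b, w))
        adj[b].append((a, w))

    dist = [INF] * (num_vertices + 1)          # 1-based; index 0 unused
    parent = [None] * num_vertices             # parent[v - 1] = predecessor of v
    dist[source] = 0

    buckets = [[] for _ in range(max_weight * (num_vertices - 1) + 1)]
    buckets[0].append(source)

    for d in range(len(buckets)):
        while buckets[d]:
            u = buckets[d].pop()
            if d > dist[u]:                    # stale entry, u was already settled closer
                continue
            for v, w in adj[u]:
                if dist[u] + w < dist[v]:
                    dist[v] = dist[u] + w
                    parent[v - 1] = u
                    buckets[dist[v]].append(v)
    return parent

# mirrors the commented-out example above: shortest-path tree from vertex 1
print(bucket_dijkstra_sketch([(1, 2, 4), (1, 3, 3), (2, 3, 2), (2, 4, 1), (3, 4, 5)], 1, 5, 4))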
Example #7
def deepwalk_process(args):

    start_time = time.time()  #processing time measurement

    logger = __get_logger()

    if args.format == "adjacency":
        graph_adjacency, num_nodes, num_edges = text_to_adjacency(args.input)
        G = mygraph.Graph(graph_adjacency, num_nodes, num_edges)  #graph object

    print("\nNumber of nodes: {}".format(G.num_of_nodes))
    print("\nNumber of edges: {}".format(G.num_of_edges))

    num_walks = G.num_of_nodes * args.number_walks

    print("\nNumber of walks: {}".format(num_walks))

    data_size = num_walks * args.walks_length

    print("\nData size (walks*length): {}".format(data_size))

    #Embedding phase
    print("\nWalking...")
    #shape(340 x 40)
    walks = G.build_deep_walk(num_paths=args.number_walks,
                              path_length=args.walks_length,
                              alpha=0,
                              rand=random.Random(args.seed))

    print("\nCounting vertex frequency...")
    vertex_counts = count_words(walks)  # dictionary

    print("\nTraining...")
    if args.model == 'skipgram':
        #create skipgram model
        language_model = Skipgram(sentences=walks,
                                  vocabulary_counts=vertex_counts,
                                  size=args.dimension,
                                  window=args.window_size,
                                  min_count=0,
                                  trim_rule=None,
                                  workers=cpu_count(),
                                  compute_loss=True,
                                  callbacks=[callback()])

        #save skipgram model
        language_model.save("skipgram_model")

        #reload skipgram model
        model = Skipgram.load("skipgram_model")

        ####-------------------------------------------------------------------------------####
        ####                              embedding Generation                             ####
        ####-------------------------------------------------------------------------------####

        # for t iterations do
        for t in range(args.epoch):
            # while not converged do -> minimize embedding loss term
            model.train(sentences=walks,
                        total_examples=1,
                        epochs=args.epoch,
                        compute_loss=True,
                        callbacks=[callback()])
            model.wv.save_word2vec_format(args.output)

            # load embeddings
            embedding_results = {}
            for i in range(G.num_of_nodes):
                embedding_results[i] = list(model.wv[str(i)])
                #embedding_results.append(model.wv[str(i)])

            embedding_dim = len(embedding_results[0])

            # 64-dim embeddings (shape (34, 64))
            original_embedding = []

            for i in list(embedding_results.keys()):
                original_embedding.append(embedding_results[i])

            # print(original_embedding)
            # print(np.array(original_embedding).shape)
            # exit()

            ####-------------------------------------------------------------------------------####
            ####                                     PCA                                       ####
            ####-------------------------------------------------------------------------------####

            # convert the n-dimensional embedding to 2-dim (to satisfy Theorem 1.)
            df = pd.DataFrame(columns=range(0, embedding_dim))

            for i in range(G.num_of_nodes):
                df.loc[i] = embedding_results[i]

            #print(df)

            #Implement PCA to reduce dimensionality of embeddings

            #vector representation(embeddings) list
            X = df.values.tolist()
            #print(X)
            #Computing correlation of matrix
            X_corr = df.corr()

            #Computing eigen values and eigen vectors
            values, vectors = np.linalg.eig(X_corr)

            # Sort the eigenvectors by their corresponding eigenvalues in descending order
            order = (-values).argsort()
            values = values[order]
            vectors = vectors[:, order]

            #Taking first 2 components which explain maximum variance for projecting
            new_vectors = vectors[:, :2]

            # Project onto the new 2-dimensional basis
            neww_X = np.dot(X, new_vectors)
            neww_X = neww_X.real

            ####-------------------------------------------------------------------------------####
            ####                          curvature regularization phase                       ####
            ####-------------------------------------------------------------------------------####

            # while not converged do -> minimize curvature regularization term

            #generate random walks(walk length :5)

            walks_2 = G.build_deep_walk_for_abs(
                num_paths=args.number_walks,
                path_length=args.walks_length_2,
                alpha=0,
                rand=random.Random(args.seed))

            curvature_reg_model = curvature_regularization.abs_curvature_regularization(
                walks_2, neww_X, num_walks, G.num_of_nodes, model.syn1,
                args.dimension, original_embedding)

            # meet the condition of Theorem 1.
            curvature_reg_model.optimization()

            #minimize the two terms jointly

    else:
        raise Exception("unsupported language model: only 'skipgram' is implemented")

    total_time = time.time() - start_time

    #   print("\nTraining completed")
    #   print("\nembeddings have been generated")
    #
    print("\nProcessing time: {:.2f}".format(total_time))