def simulate_cascade(g, p, source=None, return_tree=False):
    """
    graph_tool version of simulating cascade
    return np.ndarray on vertices as the infection time in cascade
    uninfected node has dist -1
    """
    if source is None:
        source = random.choice(np.arange(g.num_vertices(), dtype=int))
    gv = sample_graph_by_p(g, p)

    times = get_infection_time(gv, source)
    if return_tree:
        all_edges = set()
        for target in np.nonzero(times != -1)[0]:
            path = shortest_path(gv, source=source,
                                 target=gv.vertex(target))[0]
            edges = set(zip(path[:-1], path[1:]))
            all_edges |= edges
        tree = Graph(directed=True)
        for _ in range(g.num_vertices()):
            tree.add_vertex()
        for u, v in all_edges:
            tree.add_edge(int(u), int(v))
        return source, times, tree
    else:
        return source, times
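The helpers sample_graph_by_p and get_infection_time come from the surrounding project and are not shown here; presumably the former keeps each edge with probability p and the latter returns BFS infection times. A rough, hypothetical sketch of get_infection_time under that assumption:

import numpy as np
from graph_tool.all import shortest_distance

def get_infection_time(g, source):
    # hypothetical re-implementation: BFS distance from `source`,
    # with unreachable vertices mapped to -1
    dist = shortest_distance(g, source=g.vertex(source)).a.copy()
    dist[dist >= g.num_vertices()] = -1  # graph_tool marks unreachable vertices with a huge int
    return dist.astype(int)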
Example 2
def mwgm_graph_tool(pairs, sim_mat):
    from graph_tool.all import Graph, max_cardinality_matching
    if not isinstance(pairs, list):
        pairs = list(pairs)
    g = Graph()
    weight_map = g.new_edge_property("float")
    nodes_dict1 = dict()
    nodes_dict2 = dict()
    edges = list()
    for x, y in pairs:
        if x not in nodes_dict1.keys():
            n1 = g.add_vertex()
            nodes_dict1[x] = n1
        if y not in nodes_dict2.keys():
            n2 = g.add_vertex()
            nodes_dict2[y] = n2
        n1 = nodes_dict1.get(x)
        n2 = nodes_dict2.get(y)
        e = g.add_edge(n1, n2)
        edges.append(e)
        weight_map[g.edge(n1, n2)] = sim_mat[x, y]
    print("graph via graph_tool", g)
    res = max_cardinality_matching(g,
                                   heuristic=True,
                                   weight=weight_map,
                                   minimize=False)
    edge_index = np.where(res.get_array() == 1)[0].tolist()
    matched_pairs = set()
    for index in edge_index:
        matched_pairs.add(pairs[index])
    return matched_pairs
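A minimal usage sketch for mwgm_graph_tool above (assuming numpy is imported as np and a graph_tool version that accepts the heuristic/weight keywords used in the function):

import numpy as np

pairs = [(0, 0), (0, 1), (1, 1)]  # hypothetical candidate pairs
sim_mat = np.array([[0.9, 0.4],
                    [0.1, 0.8]])  # similarity of (x, y)
matched = mwgm_graph_tool(pairs, sim_mat)
print(matched)  # the only size-2 matching here is {(0, 0), (1, 1)}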
Example 3
def alignment_graph(lengths=[], pairings=[], alignments=[]):
    #print('making graph')
    g = Graph(directed=False)
    seq_index = g.new_vertex_property("int")
    time = g.new_vertex_property("int")
    #add vertices
    g.add_vertex(sum(lengths))
    seq_index.a = np.concatenate([np.repeat(i,l) for i,l in enumerate(lengths)])
    time.a = np.concatenate([np.arange(l) for l in lengths])
    #add edges (alignments)
    alignment_index = g.new_edge_property("int")
    segment_index = g.new_edge_property("int")
    for i,a in enumerate(alignments):
        if len(a) > 0:
            j, k = pairings[i]
            pairs = np.concatenate(a, axis=0)
            indicesJ = (np.arange(lengths[j]) + sum(lengths[:j]))[pairs.T[0]]
            indicesK = (np.arange(lengths[k]) + sum(lengths[:k]))[pairs.T[1]]
            seg_indices = np.concatenate([np.repeat(i, len(a))
                for i,a in enumerate(a)])
            g.add_edge_list(np.vstack([indicesJ, indicesK,
                np.repeat(i, len(pairs)), seg_indices]).T,
                eprops=[alignment_index, segment_index])
    #g.add_edge_list([(b, a) for (a, b) in g.edges()])
    #print('created alignment graph', g)
    #g = prune_isolated_vertices(g)
    #print('pruned alignment graph', g)
    #g = transitive_closure(g)
    #graph_draw(g, output_size=(1000, 1000), output="results/casey_jones_bars.pdf")
    return g, seq_index, time, alignment_index, segment_index
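A small usage sketch for alignment_graph with hypothetical input: two sequences of length 3 and a single pairing whose alignment contains one segment of two index pairs.

import numpy as np

lengths = [3, 3]
pairings = [(0, 1)]
alignments = [[np.array([[0, 0], [1, 1]])]]
g, seq_index, time, alignment_index, segment_index = alignment_graph(
    lengths, pairings, alignments)
print(g.num_vertices(), g.num_edges())  # 6 2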
Example 4
def build_word_graph(model_fname, limiar=0.2):
    """
    Constroi um grafo de walavras ponderado pela similaridade entre elas
    de acordo com o modelo.
    :param model_fname: Nome do arquivo com o modelo word2vec como foi salvo
    :return: objeto grafo
    """
    m = Word2Vec.load(model_fname)
    g = Graph()
    freq = g.new_vertex_property("int")
    weight = g.new_edge_property("float")
    i = 0
    vdict = {}
    for w1, w2 in combinations(m.vocab.keys(), 2):
        if w1 == '' or w2 == '':
            continue
        # print(w1,w2)

        v1 = g.add_vertex() if w1 not in vdict else vdict[w1]
        vdict[w1] = v1
        freq[v1] = m.vocab[w1].count
        v2 = g.add_vertex() if w2 not in vdict else vdict[w2]
        vdict[w2] = v2
        freq[v2] = m.vocab[w2].count
        sim = m.similarity(w1, w2)
        if sim > 0.1:
            e = g.add_edge(v1, v2)
            weight[e] = sim
        if i > 10000:
            break
        i += 1
    g.vertex_properties['freq'] = freq
    g.edge_properties['sim'] = weight
    return g
Example 5
class ZonedNetwork:
    def __init__(self,
                 size: Tuple[int, int] = (10, 10),
                 field_size: Tuple[int, int] = (100, 100)):
        self.g = Graph(directed=True)
        self.n_zones = size[0] * size[1]
        self.fwidth = field_size[0]
        self.fheight = field_size[1]
        self.n_rows = size[0]
        self.n_cols = size[1]
        self.row_size: float = self.fheight / self.n_rows
        self.col_size: float = self.fwidth / self.n_cols
        self.g.add_vertex(self.n_zones)

    def get_zone(self, coords: Tuple):
        r = int(coords[1] / self.row_size)
        c = int(coords[0] / self.col_size)
        r = min(self.n_rows - 1, r)
        c = min(self.n_cols - 1, c)
        return self.g.vertex(r * self.n_cols + c)

    def add_passes(self, coords_pairs: List[Tuple]):
        pairs = [(self.get_zone((x1, y1)), self.get_zone((x2, y2)))
                 for x1, y1, x2, y2 in coords_pairs]
        return self.g.add_edge_list(pairs)

    def save(self, file: str):
        self.g.save(file, fmt='graphml')
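A small usage sketch for the ZonedNetwork class above (assuming Tuple and List from typing and graph_tool.all.Graph are imported); the output file name is hypothetical:

net = ZonedNetwork(size=(10, 10), field_size=(100, 100))
net.add_passes([(5, 5, 95, 95)])  # one pass from zone 0 to zone 99
print(net.g.num_edges())          # 1
net.save('passes.graphml')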
def test_mospp_small():
    G = Graph()
    G.add_vertex(1)
    G.add_vertex(2)
    G.add_vertex(3)
    G.add_vertex(4)
    c1 = G.new_edge_property("int")
    c2 = G.new_edge_property("int")
    e1 = G.add_edge(1, 3)
    e2 = G.add_edge(3, 4)
    e3 = G.add_edge(1, 2)
    e4 = G.add_edge(2, 4)
    e5 = G.add_edge(1, 4)
    c1[e1] = 1
    c1[e2] = 1
    c1[e3] = 0
    c1[e4] = 0
    c1[e5] = 2
    c2[e1] = 1
    c2[e2] = 1
    c2[e3] = 1
    c2[e4] = 1
    c2[e5] = 0
    assert [[G.vertex_index[r] for r in route]
            for route in mospp(G.vertex(1), G.vertex(4), c1, c2)
            ] == [[1, 4], [1, 2, 4]]
Example 9
def simulate_cascade(g, p, source=None, return_tree=False):
    """
    graph_tool version of simulating cascade
    return np.ndarray on vertices as the infection time in cascade
    uninfected node has dist -1
    """
    gv = sample_graph_by_p(g, p)

    if source is None:
        # consider the largest cc
        infected_nodes = np.nonzero(label_largest_component(gv).a)[0]
        source = np.random.choice(infected_nodes)

    times = get_infection_time(gv, source)

    if return_tree:
        # get the tree edges: each infected node is linked to its BFS predecessor
        _, pred_map = shortest_distance(gv, source=source, pred_map=True)
        # recompute the infected set from the infection times so this also
        # works when an explicit `source` was passed in
        infected_nodes = np.nonzero(times != -1)[0]
        edges = [(pred_map[i], i) for i in infected_nodes if i != source]

        # create tree
        tree = Graph(directed=True)
        tree.add_vertex(g.num_vertices())
        vfilt = tree.new_vertex_property('bool')
        vfilt.a = False
        for u, v in edges:
            tree.add_edge(int(u), int(v))
        for v in set(itertools.chain(*edges)):
            vfilt[v] = True
        tree.set_vertex_filter(vfilt)

    if return_tree:
        return source, times, tree
    else:
        return source, times
Example 10
def graph_from_matrix(matrix, directed=False):
    g = Graph(directed=directed)
    g.add_vertex(len(matrix))
    weights = g.new_ep("float")
    edges = np.nonzero(matrix)
    edges = np.append(edges, [matrix[edges]], axis=0)
    g.add_edge_list(list(zip(*edges)), eprops=[weights])
    #graph_draw(g, output_size=(1000, 1000), output="results/structure.pdf")
    return g, weights
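A quick usage sketch for graph_from_matrix (assuming numpy as np and graph_tool.all.Graph are imported as in the snippet above):

import numpy as np

adj = np.array([[0.0, 1.5, 0.0],
                [0.0, 0.0, 2.0],
                [0.5, 0.0, 0.0]])
g, weights = graph_from_matrix(adj, directed=True)
print(g.num_vertices(), g.num_edges())  # 3 3
print({(int(e.source()), int(e.target())): weights[e] for e in g.edges()})
# {(0, 1): 1.5, (1, 2): 2.0, (2, 0): 0.5}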
Example 11
class StackGraph(object):
    def __init__(self):
        self.g = None

    def load(self, filename):
        # Initialize the graph
        self.g = Graph()
        # Each node will store a FunctionWrapper() class instance.
        self.g.vertex_properties["functions"] = self.g.new_vertex_property("object")
        self.g.vertex_properties["display"] = self.g.new_vertex_property("string")
        # Each edge will store a [ ..tbd.. ] .
        self.g.edge_properties["calls"] = self.g.new_edge_property("object")

        # Load the log file and build the graph
        i = 0
        f = open(filename, "rb")
        for line in f:
            i += 1
            try:
                # Skip any informational lines
                if "*" in line:     continue
                # Extract a call stack snapshot
                words = line.split()
                time = words[0][2:]
                depth = words[1][2:]
                stack = [FunctionWrapper(instring=item) for item in words[2].split("->")]

                # Add the top 2 functions to the graph, if necessary.  Format: f1()->f2()
                f1, f2 = stack[-2], stack[-1]
                v1, v2 = None, None
                    # Search for the vertices
                for v in self.g.vertices():
                    if self.g.vp.functions[v] == f1:    v1 = v
                    if self.g.vp.functions[v] == f2:    v2 = v
                    if v1 != None and v2 != None:       break

                    # Add new vertices if needed
                if v1 == None:
                    v1 = self.g.add_vertex()
                    self.g.vp.functions[v1] = f1
                    self.g.vp.display[v1] = f1.graphDisplayString()
                if v2 == None:
                    v2 = self.g.add_vertex()
                    self.g.vp.functions[v2] = f2
                    self.g.vp.display[v2] = f2.graphDisplayString()

                # Add the edge if necessary, and then add data to it
                e = self.g.edge(v1, v2)
                if e is None:
                    e = self.g.add_edge(v1, v2)
                    self.g.ep.calls[e] = CallList(v1, v2)

                self.g.ep.calls[e].addCall(time, depth)
            except Exception as e:
                print "Exception on line", i, ":", e
                print [str(x) for x in stack]
                exit()
Example 12
def get_incompatible_segments(g, seg_index, out_edges):
    incomp_graph = Graph(directed=False)
    num_segs = np.max(seg_index.a)+1
    incomp_graph.add_vertex(num_segs)
    for v in g.get_vertices():
        for vs in group_adjacent(sorted(g.get_out_neighbors(v))):
            edges = out_edges[v][np.where(np.isin(out_edges[v][:,1], vs))][:,2]
            segments = list(np.unique(seg_index.a[edges]))
            [incomp_graph.add_edge(s,t)
                for i,s in enumerate(segments) for t in segments[i+1:]]
    return label_components(incomp_graph)[0].a
Example 13
def test_graphtool():
    g = Graph(directed=True)
    g.add_vertex(4)
    g.add_edge_list([(0, 1), (1, 2), (2, 3), (3, 0)])
    weight = g.new_edge_property('float')
    weight[g.edge(0, 1)] = 1
    weight[g.edge(1, 2)] = 2
    weight[g.edge(2, 3)] = 3
    weight[g.edge(3, 0)] = 4
    
    assert set(gt2edges_and_weights(g, weight)) == {
        (0, 1, 1), (1, 2, 2), (2, 3, 3), (3, 0, 4)
    }
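gt2edges_and_weights is not shown in this snippet; a hypothetical implementation consistent with the assertion above would be:

def gt2edges_and_weights(g, weight):
    # one (source, target, weight) triple per edge
    return [(int(e.source()), int(e.target()), weight[e]) for e in g.edges()]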
Example 14
def session_draw_bis_melty(sessions_id, weblog, weblog_columns_dict):
    """
    Draw the graph of sessions with sessions_id given in entry
    """
    from graph_tool.all import Graph
    from graph_tool.all import graph_draw
    session = weblog[weblog.session_id == sessions_id]
    session = session.rename(index=str,columns = {weblog_columns_dict['requested_page_column']:'requested_page',\
                                                  weblog_columns_dict['referrer_page_column']:'referrer_page'})
    s_pages = session[['requested_page', 'requested_external']]
    s_pages_ref = session[['referrer_page', 'referrer_external']]
    s_pages_ref = s_pages_ref.rename(index=str,
                                     columns={
                                         'referrer_page': 'requested_page',
                                         'referrer_external':
                                         'requested_external'
                                     })
    s_pages = s_pages.append(s_pages_ref)
    s_pages.drop_duplicates(subset='requested_page', inplace=True)
    g = Graph()
    v = {}
    halo = g.new_vertex_property("bool")
    for row in s_pages.itertuples():
        v[row.requested_page] = g.add_vertex()
        if row.requested_external:
            halo[v[row.requested_page]] = True
        else:
            halo[v[row.requested_page]] = False
    session.apply(
        lambda x: g.add_edge(v[x.referrer_page], v[x.requested_page]), axis=1)
    graph_draw(g,
               vertex_halo=halo,
               output="./_session" + str(sessions_id) + ".png")
    return
Example 15
def load_train(name):
    '''
    Training file is numbered from 0 to n. Not all nodes in the training file have their own row.
    '''
    g = Graph()
    node_ids = set()
    n = -1
    for n, (node_id, neighbor_ids) in enumerate(iter_adj_list(name)):
        node_ids.add(node_id)
        node_ids.update(neighbor_ids)
    n += 1
    g.add_vertex(len(node_ids))
    for i, (node_id, neighbor_ids) in enumerate(iter_adj_list(name)):
        print('adding edge for vertex {}/{}'.format(i + 1, n))
        for neighbor_id in neighbor_ids:
            g.add_edge(node_id, neighbor_id)
    return g
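iter_adj_list is not shown here; a hypothetical reader matching the usage above (assuming one whitespace-separated row per node, the node id followed by its neighbor ids) could look like:

def iter_adj_list(name):
    with open(name) as f:
        for line in f:
            ids = [int(tok) for tok in line.split()]
            yield ids[0], ids[1:]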
Example 16
def graph_from_pdb(pdb_str):
    g = Graph(directed=False)

    vertex_types = g.new_vertex_property("string")
    g.vertex_properties['type'] = vertex_types

    vertices = []

    def get_connect_list():
        connect_ids_list = [
            [int(str_id) for str_id in line.split()[1:]]
            for line in pdb_str.splitlines()
            if is_pdb_connect_line(line)
        ]

        return reduce(
            lambda acc, e: acc + e,
            [
                [
                    (i, j)
                    for (i, j) in map(
                        lambda i_j: (i_j[0] - 1, i_j[1] - 1),
                        zip(cycle(connect_ids[0:1]), connect_ids[1:]),
                    )
                    if i < j
                ]
                for connect_ids in connect_ids_list
            ],
            [],
        )

    connects = get_connect_list()

    def get_valence(atom_id):
        return sum([1 for connect in connects if atom_id in connect])

    atom_lines = [
        line for line in pdb_str.splitlines()
        if is_pdb_atom_line(line)
    ]

    for (atom_id, line) in enumerate(atom_lines):
        fields = pdb_fields(line)
        v = g.add_vertex()
        vertex_types[v] = type_identifier_for(
            fields[11].strip().upper(),
            get_valence(atom_id),
        )
        vertices.append(v)

    for (i, j) in connects:
        g.add_edge(vertices[i], vertices[j])

    return g
Example 17
def clean_up(g, seg_index):
    #plot_matrix(np.triu(adjacency_matrix(g)), "results/clean0.png")
    #graph_draw(g, output_size=(1000, 1000), output="results/clean_up0.pdf")
    
    seg_combos = get_segment_combos(g, seg_index)
    best = sorted(seg_combos.items(), key=lambda c: c[1], reverse=True)#[:200]
    #print(best)
    best = best[0][0]
    #print(best)
    
    #print(edges[:100])
    reduced = Graph(directed=False)
    reduced.add_vertex(len(g.get_vertices()))
    edges = g.get_edges([seg_index])
    edges = edges[np.where(np.isin(edges[:,2], best))]
    reduced.add_edge_list(edges)
    #print(reduced)
    #plot_matrix(np.triu(adjacency_matrix(reduced)), "results/cleani2.png")
    #graph_draw(reduced, output_size=(1000, 1000), output="results/clean_up1.pdf")
    return reduced
Example 18
def vary_tasks(f):

    num_pipelines = 1
    num_stages = 1
    num_tasks = [1, 10, 100, 1000, 10000, 100000]

    for tasks in num_tasks:
        print 'starting'

        start = time.time()

        # Create empty set of pipes which is equivalent to the entire application
        set_of_pipes = set()
        for pipe in range(num_pipelines):

            # Create empty graph for each pipe
            Gpipe = Graph()

            for stage in range(num_stages):

                # Create a set of tasks to be added to each stage
                set_of_tasks = frozenset([Kernel() for _ in range(tasks)])

                cur_stage = set_of_tasks

                # Add current stage to current pipe
                Gpipe.add_vertex(cur_stage)

            # Add current pipe to set of pipes
            set_of_pipes.add(Gpipe)

        end = time.time()

        f.write('pipes: %s, stages: %s, tasks: %s, time: %s\n' %
                (num_pipelines, num_stages, tasks, end - start))
        print 'pipes: %s, stages: %s, tasks: %s, time: %s\n' % (
            num_pipelines, num_stages, tasks, end - start)
Example 19
def build_graph(m_codes, m_list):
    n_models, n_attributes = m_codes.shape

    g = Graph()

    v_map = {}
    names = g.new_vertex_property("object")

    v_atts = g.add_vertex(n_attributes)
    v_mods = g.add_vertex(n_models)
    v_imps = g.add_vertex(n_attributes)

    for v_idx, v in enumerate(v_atts):
        v_n = v_name(v_idx, kind="data")
        v_map[v_n] = int(v)
        names[v] = v_n

    for v_idx, v in enumerate(v_mods):
        v_n = v_name(v_idx, kind="model")
        v_map[v_n] = int(v)
        names[v] = v_n

        in_edges = ((d, v) for d in m_list[v_idx].desc_ids)
        out_edges = ((v, t) for t in m_list[v_idx].targ_ids)

        g.add_edge_list(in_edges)
        g.add_edge_list(out_edges)

    for v_idx, v in enumerate(v_imps):
        v_n = v_name(v_idx, kind="imputation")
        v_map[v_n] = int(v)
        names[v] = v_n

    g.vp.names = names
    g.v_map = v_map
    return g
class __Graph__:

    def __init__(self):
        self.graph = GT_Graph()
        self.cookies = dict()
        self.cookierecvr = CookieRecvr(self)
        self.cookierecvr.start()

    def new_cookie(self, cookie):
        self.cookies[cookie['cid']] = self.graph.add_vertex()
        logging.info('added cookie {} to graph'.format(cookie['cid']))
        for parent in cookie['parents']:
            try:
                self.graph.add_edge(self.cookies[parent],
                                    self.cookies[cookie['cid']])
                logging.info(
                    'added edge from cookie {} to graph'.format(parent))
            except KeyError:
                logging.info('parent not known in graph')
Example 21
def fasttest(f):
    print >> sys.stderr, "Building graph"
    nodes, edges, tags = get_graph(f)
    print >> sys.stderr, "%i nodes, %i edges" % (len(nodes), len(edges))

    from graph_tool.all import Graph

    g = Graph()
    node_map = {}

    print "Adding vertices"
    for osm_id in nodes.iterkeys():
        node_map[osm_id] = g.add_vertex()

    print "Adding edges"
    for a, b in edges:
        try:
            g.add_edge(node_map[a], node_map[b])
        except KeyError:
            continue
Example 22
def to_gt(db):
    """Convert db to graph-tool representation"""
    from graph_tool.all import Graph

    graph = Graph(directed=True)

    mapping = dict()

    for native in db.query(vertices, get)():
        vertex = graph.add_vertex()
        mapping[native.uid] = graph.vertex_index[vertex]

    for native in db.query(edges, get)():
        start = native.start().uid
        start = mapping[start]
        end = native.end().uid
        end = mapping[end]
        graph.add_edge(start, end)

    return graph
Example 24
def vytvořím_graph_tool_graf():
    from graph_tool.all import Graph
    
    graf = Graph()
    u1 = graf.add_vertex()
    u2 = graf.add_vertex()
    graf.add_edge(u1,  u2)
    
    vprop_double = graf.new_vertex_property("double")            # Double-precision floating point
    vprop_double[graf.vertex(1)] = 3.1416

    vprop_vint = graf.new_vertex_property("vector<int>")         # Vector of ints
    vprop_vint[graf.vertex(0)] = [1, 3, 42, 54]

    eprop_dict = graf.new_edge_property("object")                # Arbitrary python object. In this case, a dictionary.
    eprop_dict[next(graf.edges())] = {"foo": "bar", "gnu": 42}

    gprop_bool = graf.new_graph_property("bool")                  # Boolean
    gprop_bool[graf] = True
    
    graf.save('./data/graph_tool.graphml',  fmt='xml')
Example 25
def session_draw_bis(sessions_id, weblog, weblog_columns_dict):
    """
    Draw the graph of sessions with sessions_id given in entry
    """
    from graph_tool.all import Graph
    from graph_tool.all import graph_draw
    session = weblog[weblog.session_id == sessions_id]
    session = session.rename(index=str,columns = {weblog_columns_dict['requested_page_column']:'requested_page',\
                                                  weblog_columns_dict['referrer_page_column']:'referrer_page'})
    s_pages = session['requested_page']
    s_pages_ref = session['referrer_page']
    #s_pages_ref = s_pages_ref.rename(index = str, columns = {'referrer_page':'requested_page'})
    s_pages = s_pages.append(s_pages_ref)
    s_pages.drop_duplicates(inplace=True)
    g = Graph()
    v = {}
    for page in s_pages.values:
        v[page] = g.add_vertex()

    session.apply(
        lambda x: g.add_edge(v[x.referrer_page], v[x.requested_page]), axis=1)
    graph_draw(g, output="../graph_dump/_session" + str(sessions_id) + ".png")
    return
Example 26
class UNISrt(object):
    '''
    This class represents UNIS in the local runtime environment (local to the apps).
    Every UNIS model defined in periscope/settings.py is represented as
    a corresponding item of the 'resources' list in this class.
    At the initialization phase, UNISrt creates a cache of the UNIS db (and
    maintains it consistent in a best-effort manner).
    '''
    
    # should move this methods to utils
    def validate_add_defaults(self, data):
        if "$schema" not in data:
            return None
        schema = self._schemas.get(data["$schema"])
        validictory.validate(data, schema)
        add_defaults(data, schema)
        
    def __init__(self):
        logger.info("starting UNIS Network Runtime Environment...")
        fconf = get_file_config(nre_settings.CONFIGFILE)
        self.conf = deepcopy(nre_settings.STANDALONE_DEFAULTS)
        merge_dicts(self.conf, fconf)
        
        self.unis_url = str(self.conf['properties']['configurations']['unis_url'])
        self.ms_url = str(self.conf['properties']['configurations']['ms_url'])
        self._unis = unis_client.UNISInstance(self.conf)
        self.time_origin = int(time())
        
        self._schemas = SchemaCache()
        self._resources = self.conf['resources']
        
        self._subunisclient = {}
        
        for resource in self._resources:
            setattr(self, resource, {'new': {}, 'existing': {}})
        
        # construct the hierarchical representation of the network
        for resource in self._resources:
            # only pullRuntime once at the beginning, as pubsub will only update
            # them later when resources are modified on the server
            self.pullRuntime(self, self._unis, self._unis.get(resource), resource, False)
        
        # construct the graph representation of the network, of which this NRE is in charge
        self.g = Graph()
        self.nodebook = {}
        for key in self.nodes['existing'].keys():
            self.nodebook[key] = self.g.add_vertex()
        
        for key, link in self.links['existing'].iteritems():
            if hasattr(link, 'src') and hasattr(link, 'dst'):
                self.g.add_edge(self.nodebook[link.src.node.selfRef],\
                                self.nodebook[link.dst.node.selfRef], add_missing=False)
        
    def pullRuntime(self, mainrt, currentclient, data, resource_name, localnew):
        '''
        this function should convert the input data into Python runtime objects
        '''
        model = resources_classes[resource_name]
        
        print resource_name
        if data and 'redirect' in data and 'instances' in data:
            if len(data['instances']) == 0:
                return
            
            for instance_url in data['instances']:
                # TODO: needs SSL, not figured out yet, pretend it does not exist for now
                if instance_url == 'https://dlt.crest.iu.edu:9000' or instance_url == 'http://iu-ps01.crest.osris.org:8888'\
                                    or instance_url == 'http://dev.crest.iu.edu:8888' or instance_url == 'http://unis.crest.iu.edu:8890'\
                                    or instance_url == 'http://monitor.crest.iu.edu:9000' or instance_url == 'http://sc-ps01.osris.org:8888': 
                    continue
                
                if instance_url not in self._subunisclient:
                    conf_tmp = deepcopy(self.conf)
                    conf_tmp['properties']['configurations']['unis_url'] = instance_url
                    conf_tmp['properties']['configurations']['ms_url'] = instance_url # assume ms is the same as unis
                    self._subunisclient[instance_url] = unis_client.UNISInstance(conf_tmp)
                
                unis_tmp = self._subunisclient[instance_url]
                
                self.pullRuntime(mainrt, unis_tmp, unis_tmp.get(resource_name), resource_name, False)
                    
        elif data and isinstance(data, list):
            # sorting: in unisrt res dictionaries, a newer record of same index will be saved
            data.sort(key=lambda x: x.get('ts', 0), reverse=False)
            for v in data:
                model(v, mainrt, currentclient, localnew)
                
            threading.Thread(name=resource_name + '@' + currentclient.config['unis_url'],\
                             target=self.subscribeRuntime, args=(resource_name, self._unis,)).start()
        
    def pushRuntime(self, resource_name):
        '''
        this function upload specified resource to UNIS
        '''
        def pushEntry(k, entry):
            data = entry.prep_schema()
            groups = data['selfRef'].split('/')
            unis_str = '/'.join(groups[:3])
            if unis_str in self._subunisclient:
                uc = self._subunisclient[unis_str]
            else:
                uc = self._unis
            
            # use attribute "ts" to indicate an object downloaded from UNIS, and
            # only UPDATE the values of this kind of objects.
            if hasattr(entry, 'ts'):
                url = '/' + resource_name + '/' + getattr(entry, 'id')
                uc.put(url, data)
            else:
                url = '/' + resource_name
                uc.post(url, data)
                
        while True:
            try:
                key, value = getattr(self, resource_name)['new'].popitem()
                
                if not isinstance(value, list):
                    pushEntry(key, value)
                else:
                    for item in value:
                        pushEntry(key, item)
                    
            except KeyError:
                return
    
    def subscribeRuntime(self, resource_name, currentclient):
        '''
        subscribe a channel(resource) to UNIS, and listen for any new updates on that channel
        '''
        #name = resources_subscription[resource_name]
        name = resource_name
        model = resources_classes[resource_name]
        
        #url = self.unis_url.replace('http', 'ws', 1)
        unis_url = currentclient.config['unis_url']
        url = unis_url.replace('http', 'ws', 1)
        url = url + '/subscribe/' + name
        
        ws = create_connection(url)
        
        data = ws.recv()
        while data:
            model(json.loads(data), self, currentclient, False)
            data = ws.recv()
        ws.close()

    def poke_data(self, query):
        '''
        try to address this issue:
        - ms stores lots of data, and may be separated from unis
        - this data is accessible via /data url. They shouldn't be kept on runtime environment (too much)
        - however, sometimes they may be needed. e.g. HELM schedules traceroute measurement, and needs the
          results to schedule following iperf tests
        '''
        return self._unis.get('/data/' + query)
    
    def post_data(self, data):
        '''
        same as poke_data, the other way around
        '''
        #headers = self._def_headers("data")
        print data
        return self._unis.pc.do_req('post', '/data', data)#, headers)
Example 27
def main():
    options = parse_arguments()

    clusters_enabled = options["mapping_location"] is not None

    if options["should_merge"] and not clusters_enabled:
        raise "You need to provide a mapping to use merged view.`"

    # Get the string containing the input matrix form a file/pipe
    if options["matrix_location"] is not None:
        with open(options["matrix_location"], 'r') as file:
            matrix_string = file.read().strip().split("\n")
    else:
        matrix_string = sys.stdin

    # Parse the input matrix string
    height, width, matrix = parse_matrix(matrix_string, options["has_size"])

    # Get the string containing the mapping if specified
    if clusters_enabled:
        with open(options["mapping_location"], 'r') as file:
            mapping_string = file.read().strip()
        mapping = parse_mapping(mapping_string, options["has_size"])
    else:
        mapping = None

    if options["should_merge"]:
        height, width, matrix, mapping = merge_clusters(matrix, mapping)

    graph = Graph()
    graph.add_vertex(height + width)

    shape = graph.new_vertex_property("string")
    color = graph.new_vertex_property("string")
    index = graph.new_vertex_property("string")

    for i in range(height):
        v = graph.vertex(i)
        shape[v] = "square"
        color[v] = "red"
        index[v] = str(i)

    for i in range(width):
        v = graph.vertex(height + i)
        shape[v] = "circle"
        if clusters_enabled:
            color[v] = COLORS[mapping[i] % len(COLORS)]
        else:
            color[v] = COLORS[0]
        index[v] = str(i)

    for i in range(height):
        for j in range(width):
            if abs(matrix[i][j]) < EPSILON:
                continue
            graph.add_edge(graph.vertex(i), graph.vertex(height + j))

    graph.set_directed(False)

    if clusters_enabled:
        groups = graph.new_vertex_property("int")
        for i in range(width):
            v = graph.vertex(height + i)
            groups[v] = mapping[i]
        position = sfdp_layout(graph, groups=groups)
    else:
        position = None

    graph_draw(graph,
               pos=position,
               vertex_text=index,
               vertex_shape=shape,
               vertex_fill_color=color,
               vertex_pen_width=1.2,
               vertex_color="black",
               edge_pen_width=3.4,
               fit_view=True,
               bg_color=(255, 255, 255, 1),
               output=options["output_file"])
ver_id = pairs_graph.new_vertex_property("int")
for line in f:
    spl_line = line.split(' ')

    if len(spl_line) == 1:
        continue

    pos = int(spl_line[0])
    neg = int(spl_line[1])
    cur_weight = pos + coefficient * neg

    w1 = spl_line[2].strip(' \n\uefef')
    w2 = spl_line[3].strip(' \n\uefef')

    if w1 not in word_dict:
        v1 = pairs_graph.add_vertex()
        ver_id[v1] = pairs_graph.vertex_index[v1]
        word_dict[w1] = ver_id[v1]
        ver_names[v1] = w1
    else:
        v1 = pairs_graph.vertex(word_dict[w1])

    if w2 not in word_dict:
        v2 = pairs_graph.add_vertex()
        ver_id[v2] = pairs_graph.vertex_index[v2]
        word_dict[w2] = ver_id[v2]
        ver_names[v2] = w2
    else:
        v2 = pairs_graph.vertex(word_dict[w2])

    if cur_weight == 0:
Example 29
from graph_tool.all import Graph, graph_draw, fruchterman_reingold_layout
import json

network = json.load(open('../FRRNetwork.json', 'r'))

g = Graph(directed=False)

vprop_text = g.new_vertex_property("string")
vprop_color = g.new_vertex_property("int")
vprop_size = g.new_vertex_property("int")
vprop_shape = g.new_vertex_property("string")
name_to_vertex = {}

for switch in network['switches']:
    v_switch = g.add_vertex()
    name_to_vertex[switch['name']] = v_switch
    vprop_text[v_switch] = switch['name']
    vprop_color[v_switch] = 1
    vprop_size[v_switch] = 50
    vprop_shape[v_switch] = "hexagon"
    for host in switch['hosts']:
        v_host = g.add_vertex()
        e_link = g.add_edge(v_switch, v_host)
        vprop_text[v_host] = host['name']
        vprop_color[v_host] = 100
        vprop_size[v_host] = 40
        vprop_shape[v_host] = "circle"
        name_to_vertex[host['name']] = v_host
for link in network['switch_links']:
    v_node1 = name_to_vertex[link['node1']['name']]
    v_node2 = name_to_vertex[link['node2']['name']]
def build_closure(g, cand_source, terminals, infection_times, k=-1,
                  strictly_smaller=True,
                  debug=False,
                  verbose=False):
    """
    build a clojure graph in which cand_source + terminals are all connected to each other.
    the number of neighbors of each node is determined by k

    the larger the k, the denser the graph"""
    r2pred = {}
    edges = {}
    terminals = list(terminals)

    # from cand_source to terminals
    vis = init_visitor(g, cand_source)
    cpbfs_search(g, source=cand_source, visitor=vis, terminals=terminals,
                 forbidden_nodes=terminals,
                 count_threshold=k)
    r2pred[cand_source] = vis.pred
    for u, v, c in get_edges(vis.dist, cand_source, terminals):
        edges[(u, v)] = c

    if debug:
        print('cand_source: {}'.format(cand_source))
        print('#terminals: {}'.format(len(terminals)))
        print('edges from cand_source: {}'.format(edges))

    if verbose:
        terminals_iter = tqdm(terminals)
        print('building closure graph')
    else:
        terminals_iter = terminals

    # from terminal to other terminals
    for root in terminals_iter:

        if strictly_smaller:
            late_terminals = [t for t in terminals
                              if infection_times[t] > infection_times[root]]
        else:
            # respect what the paper presents
            late_terminals = [t for t in terminals
                              if infection_times[t] >= infection_times[root]]

        late_terminals = set(late_terminals) - {cand_source}  # no one can connect to cand_source
        if debug:
            print('root: {}'.format(root))
            print('late_terminals: {}'.format(late_terminals))
        vis = init_visitor(g, root)
        cpbfs_search(g, source=root, visitor=vis, terminals=list(late_terminals),
                     forbidden_nodes=list(set(terminals) - set(late_terminals)),
                     count_threshold=k)
        r2pred[root] = vis.pred
        for u, v, c in get_edges(vis.dist, root, late_terminals):
            if debug:
                print('edge ({}, {})'.format(u, v))
            edges[(u, v)] = c

    if verbose:
        print('returning closure graph')

    gc = Graph(directed=True)

    for _ in range(g.num_vertices()):
        gc.add_vertex()

    for (u, v) in edges:
        gc.add_edge(u, v)

    eweight = gc.new_edge_property('int')
    eweight.set_2d_array(np.array(list(edges.values())))
    # for e, c in edges.items():
    #     eweight[e] = c
    return gc, eweight, r2pred
def build_region_closure(g, root, regions, infection_times, obs_nodes, debug=False):
    """return a closure graph on the the components"""
    regions = copy(regions)
    root_region = {'nodes': {root}, 'head': root, 'head_time': -float('inf')}
    regions[len(regions)] = root_region

    gc = Graph(directed=True)
    for _ in range(len(regions)):
        gc.add_vertex()

    # connect each region
    gc_edges = []
    original_edge_info = {}
    for i, j in combinations(regions, 2):
        # make group i the one with *later* head
        if regions[i]['head_time'] < regions[j]['head_time']:
            i, j = j, i
        
        if debug:
            print('i, j={}, {}'.format(i, j))
        # only need to connect head i to one of the nodes in group j
        # where nodes in j have time stamp < head i
        # then an edge from region j to region i (because j is earlier)

        head_i = regions[i]['head']
        
        def get_pseudo_time(n):
            if n == root:
                return - float('inf')
            else:
                return infection_times[n]

        targets = [n for n in regions[j]['nodes'] if get_pseudo_time(n) < regions[i]['head_time']]

        if debug:
            print('head_i: {}'.format(head_i))
            print('targets: {}'.format(targets))
            print('regions[j]["nodes"]: {}'.format(regions[j]['nodes']))
 
        if len(targets) == 0:
            continue
            
        visitor = init_visitor(g, head_i)
        forbidden_nodes = list(set(regions[i]['nodes']) | (set(regions[j]['nodes']) - set(targets)))

        if debug:
            print('forbidden_nodes: {}'.format(forbidden_nodes))
            
        # NOTE: count_threshold = 1
        cpbfs_search(g, source=head_i,
                     terminals=targets,
                     forbidden_nodes=forbidden_nodes,
                     visitor=visitor,
                     count_threshold=1)
    
        reachable_targets = [t for t in targets if visitor.dist[t] > 0]

        if debug:
            print('reachable_targets: {}'.format(reachable_targets))
            
        if len(reachable_targets) == 0:
            # cannot reach there
            continue

        source = min(reachable_targets, key=visitor.dist.__getitem__)
        dist = visitor.dist[source]

        assert dist > 0

        gc_edges.append(((j, i, dist)))
        original_edge_info[(j, i)] = {
            'dist': dist,
            'pred': visitor.pred,
            'original_edge': (source, head_i)
        }
    for u, v, _ in gc_edges:
        gc.add_edge(u, v)

    eweight = gc.new_edge_property('int')
    for u, v, c in gc_edges:
        eweight[gc.edge(gc.vertex(u), gc.vertex(v))] = c

    return gc, eweight, original_edge_info
Example 33
    def path_from_income(self):
        """
        :return: GeoDataFrame representing path of commuter to work
        """

        # Get jobs within range of income of commuter
        streets = self.env.streets.copy().reset_index(drop=True)
        origins = self.env.origins.copy()
        destinations = self.env.destinations.copy()
        destinations = destinations[
            (destinations['salary_n'] > self.income[0])
            & (destinations['salary_n'] < self.income[1])]

        # Get shortest path to one random destination
        osm_g = Graph(directed=False)
        indices = {}
        if len(destinations) > 0:

            # Add vertices to graph
            for i, osm_id in enumerate(
                    list(streets['from']) + list(streets['to'])):
                v = osm_g.add_vertex()
                v.index = int(i)
                indices[osm_id] = i

            # Add edges to graph
            for i in list(streets.index):
                o_osm = streets.at[i, 'from']
                d_osm = streets.at[i, 'to']
                osm_g.add_edge(indices[o_osm], indices[d_osm])

            # Randomly choose destination
            destination = destinations.loc[
                np.random.choice(list(destinations.index)), :]

            # Randomly choose origin parcel based on block id
            origins = origins[
                (origins['Landuse'].isin(['MFH', 'MFL', 'SFA', 'SFD', 'MX']))
                & (origins['index_block'] == self.block_id)].reset_index(
                    drop=True)
            if len(origins) > 0:
                origin = origins.loc[np.random.choice(list(origins.index)), :]

                # Get closest street of origin and destination
                osm_origin = streets[streets.centroid == nearest_points(
                    origin['geometry'], streets.centroid.unary_union)[1]]
                osm_destination = streets[streets.centroid == nearest_points(
                    destination['geometry'], streets.centroid.unary_union)[1]]

                # Calculate shortest path
                def keys(dictA, value):
                    return list(dictA.keys())[list(
                        dictA.values()).index(value)]

                path = shortest_path(
                    osm_g, indices[osm_origin['from'].values[0]],
                    indices[osm_destination['to'].values[0]])[1]
                path_gdf = pd.concat([
                    streets[
                        (streets['from'] == keys(indices, int(edge.source())))
                        & (streets['to'] == keys(indices, int(edge.target())))]
                    for edge in path
                ])

                return path_gdf
            else:
                return None
        else:
            return None
def gen_graph(repo_events):
    # tuple parameters are Python 2 only; unpack explicitly instead
    repo, events = repo_events
    graph = Graph()

    repo_on_graph = graph.new_graph_property('string')
    repo_on_graph[graph] = repo
    graph.graph_properties['repo_on_graph'] = repo_on_graph

    language_on_graph = graph.new_graph_property('string')
    language_on_graph[graph] = events[0]['language']
    graph.graph_properties['language_on_graph'] = language_on_graph

    events_on_vertices = graph.new_vertex_property('object')
    graph.vertex_properties['events_on_vertices'] = events_on_vertices

    actors_on_vertices = graph.new_vertex_property('string')
    graph.vertex_properties['actors_on_vertices'] = actors_on_vertices

    weights_on_edges = graph.new_edge_property('long double')
    graph.edge_properties['weights_on_edges'] = weights_on_edges

    # pre_vertices = []
    pre_events_map = {}
    pre_vertices_map = {}

    # owner_vertex = graph.add_vertex()
    # owner = repo.split('/')[0]
    # actors_on_vertices[owner_vertex] = owner
    # pre_vertices_map[owner] = owner_vertex

    events = sorted(events, key=lambda x: x['created_at'])

    for event in events:
        actor = event['actor']

        if actor in pre_events_map:
            continue

        created_at = event['created_at']

        vertex = graph.add_vertex()
        events_on_vertices[vertex] = event
        actors_on_vertices[vertex] = actor

        if 'actor-following' not in event:
            continue

        following = set(event['actor-following'])
        commons = following.intersection(pre_vertices_map.keys())

        # pre_vertices.append(vertex)

        # if len(commons) == 0:
        #     edge = graph.add_edge(vertex, owner_vertex)
        #     weights_on_edges[edge] = 1.0

        for pre_actor in commons:
            edge = graph.add_edge(vertex, pre_vertices_map[pre_actor])
            interval =\
                (created_at - pre_events_map[pre_actor]['created_at']).days
            weight = 1.0 / fib(interval + 2)
            weights_on_edges[edge] = weight

        pre_events_map[actor] = event
        pre_vertices_map[actor] = vertex

    return graph
Example 35
class Workflow:
    def __init__(self, edges, weights):
        self.edges = edges
        self.graph = Graph()
        self.size = len(edges['target'])
        self.graph.add_vertex(self.size)
        self.weights = weights

        # init weights part
        self.graph.vp.weights = self.graph.new_vertex_property('int16_t')
        for index in range(0, self.size):
            self.graph.vp.weights[index] = weights[index]

        for source in self.edges['source'].keys():
            for target in self.edges['source'][source]:
                self._add_edge(source, target)

        self.depth_per_node = {x: 0 for x in range(0, self.size)}
        self.accum_weights = {x: 0 for x in range(0, self.size)}
        self.find_depth()
        self.find_accum_weights(self.size - 1)
        self.depth = {x: [] for x in set(self.depth_per_node.values())}

        for node, depth in self.depth_per_node.items():
            self.depth[depth].append(node)

        self.routes_t = {}
        self.find_routes(self.size - 1, 0, self.routes_t)

        self.routes = []
        self.transpose_routes(self.size - 1, self.routes_t[self.size - 1])

    def _add_edge(self, source, target):
        self.graph.add_edge(self.graph.vertex(source),
                            self.graph.vertex(target))

    def show(self, size=1500):
        return graph_draw(self.graph,
                          vertex_text=self.graph.vertex_index,
                          vertex_font_size=18,
                          output_size=(size, size),
                          output="graph.png")

    def find_accum_weights(self, actual_node, accum_weight=0):
        already_accum_weight = self.accum_weights[actual_node]
        self.accum_weights[actual_node] = max(
            already_accum_weight, accum_weight + self.weights[actual_node])

        for fathers in self.edges['target'][actual_node]:
            self.find_accum_weights(fathers, self.accum_weights[actual_node])

    def find_depth(self, actual_node=0, actual_depth=0):
        self.depth_per_node[actual_node] = max(
            self.depth_per_node[actual_node], actual_depth)
        for next_node in self.edges['source'][actual_node]:
            self.find_depth(next_node, actual_depth + 1)

    def find_routes(self, actual_node, weight=0, routes={}):
        weight += self.weights[actual_node]
        if actual_node != 0:
            routes[actual_node] = {}
            for fathers in self.edges['target'][actual_node]:
                self.find_routes(fathers, weight, routes[actual_node])
        else:
            routes[actual_node] = weight

    def transpose_routes(self, actual_node, routes, path=[]):
        if actual_node != 0:
            path = path.copy()
            path.append(actual_node)
            for child in routes.keys():
                self.transpose_routes(child, routes[child], path)
        else:

            self.routes.append({'path': path, 'weight': routes})

    def find_cycles(self):
        visited = [False for _ in range(0, self.size)]

        return self.find_cycles_helper(0, [])

    def find_cycles_helper(self, actual_node, rec_list):
        if actual_node in rec_list:
            print(rec_list, actual_node)
            return True

        call_this = []
        for child in self.edges['source'][actual_node]:
            new_rec_list = rec_list.copy()
            new_rec_list.append(actual_node)
            call_this.append([child, new_rec_list])

        return any([self.find_cycles_helper(x[0], x[1]) for x in call_this])
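A small usage sketch for the Workflow class above, using a hypothetical three-node chain 0 -> 1 -> 2 (the constructor expects the edges keyed both by 'source' and by 'target'):

edges = {
    'source': {0: [1], 1: [2], 2: []},   # children of each node
    'target': {0: [], 1: [0], 2: [1]},   # parents of each node
}
weights = [1, 2, 3]
wf = Workflow(edges, weights)
print(wf.depth)          # {0: [0], 1: [1], 2: [2]}
print(wf.routes)         # [{'path': [2, 1], 'weight': 6}]
print(wf.find_cycles())  # False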
def steiner_tree_mst(g, root, infection_times, source, terminals,
                     closure_builder=build_closure,
                     strictly_smaller=True,
                     return_closure=False,
                     k=-1,
                     debug=False,
                     verbose=True):
    gc, eweight, r2pred = closure_builder(g, root, terminals,
                                          infection_times,
                                          strictly_smaller=strictly_smaller,
                                          k=k,
                                          debug=debug,
                                          verbose=verbose)

    # get the minimum spanning arborescence
    # graph_tool does not provide minimum_spanning_arborescence
    if verbose:
        print('getting mst')
    gx = gt2nx(gc, root, terminals, edge_attrs={'weight': eweight})
    try:
        nx_tree = nx.minimum_spanning_arborescence(gx, 'weight')
    except nx.exception.NetworkXException:
        if debug:
            print('fail to find mst')
        if return_closure:
            return None, gc, None
        else:
            return None

    if verbose:
        print('returning tree')

    mst_tree = Graph(directed=True)
    for _ in range(g.num_vertices()):
        mst_tree.add_vertex()

    for u, v in nx_tree.edges():
        mst_tree.add_edge(u, v)

    if verbose:
        print('extract edges from original graph')

    # extract the edges from the original graph

    # sort observations by time
    # and also topological order
    topological_index = {}
    for i, e in enumerate(bfs_iterator(mst_tree, source=root)):
        topological_index[int(e.target())] = i
    sorted_obs = sorted(
        set(terminals) - {root},
        key=lambda o: (infection_times[o], topological_index[o]))

    tree_nodes = {root}
    tree_edges = set()
    # print('root', root)
    for u in sorted_obs:
        if u in tree_nodes:
            if debug:
                print('{} covered already'.format(u))
            continue
        # print(u)
        v, u = map(int, next(mst_tree.vertex(u).in_edges()))  # v is ancestor
        tree_nodes.add(v)

        late_nodes = [n for n in terminals if infection_times[n] > infection_times[u]]
        vis = init_visitor(g, u)
        # from child to any tree node, including v

        cpbfs_search(g, source=u, terminals=list(tree_nodes),
                     forbidden_nodes=late_nodes,
                     visitor=vis,
                     count_threshold=1)
        # dist, pred = shortest_distance(g, source=u, pred_map=True)
        node_set = {v for v, d in vis.dist.items() if d > 0}
        reachable_tree_nodes = node_set.intersection(tree_nodes)
        ancestor = min(reachable_tree_nodes, key=vis.dist.__getitem__)

        edges = extract_edges_from_pred(g, u, ancestor, vis.pred)
        edges = {(j, i) for i, j in edges}  # need to reverse it
        if debug:
            print('tree_nodes', tree_nodes)
            print('connecting {} to {}'.format(v, u))
            print('using ancestor {}'.format(ancestor))
            print('adding edges {}'.format(edges))
        tree_nodes |= {u for e in edges for u in e}

        tree_edges |= edges

    t = Graph(directed=True)
    for _ in range(g.num_vertices()):
        t.add_vertex()

    for u, v in tree_edges:
        t.add_edge(t.vertex(u), t.vertex(v))

    tree_nodes = {u for e in tree_edges for u in e}
    vfilt = t.new_vertex_property('bool')
    vfilt.a = False
    for v in tree_nodes:
        vfilt[t.vertex(v)] = True

    t.set_vertex_filter(vfilt)

    if return_closure:
        return t, gc, mst_tree
    else:
        return t
Example 37
class Network:
    def __init__(self, nodes_info=None, links_info=None, file_name=None):
        self.g = Graph()

        if nodes_info and links_info:
            self.nodes_info = nodes_info
            self.links_info = links_info
            self.g.vertex_properties["name"] = self.g.new_vertex_property(
                'string')
            self.g.vertex_properties["id"] = self.g.new_vertex_property(
                'int32_t')
            self.g.edge_properties["weight"] = self.g.new_edge_property(
                'int32_t')

            self.create_network()
            self.g.vertex_properties["pagerank"] = pagerank(
                self.g, weight=self.g.edge_properties["weight"])
            self.g.vertex_properties[
                "degree_centrality"] = self.degree_centrality()

        elif file_name:
            self.load_network(file_name)

    def create_network(self):
        # Add Nodes
        for node in self.nodes_info:
            self.add_n(node)

        # Add Links
        for link in self.links_info:
            n_loser = 0
            n_winner = 0
            loser = link['loser']
            winner = link['winner']
            weight = link['rounds']

            for team_id in self.g.vertex_properties.id:
                if loser == team_id:
                    break
                n_loser += 1

            for team_id in self.g.vertex_properties.id:
                if winner == team_id:
                    break
                n_winner += 1

            self.add_l(n_loser, n_winner, 16 / weight * 100)

    def load_network(self, file_name):
        new_file_name = '..' + sep + '..' + sep + 'network-graphs' + sep + file_name
        self.g.load(new_file_name, fmt="gt")

    def get_normalized_pagerank(self):
        max_pgr = 0
        for pgr in self.g.vertex_properties.pagerank:
            if pgr > max_pgr:
                max_pgr = pgr

        return [
            self.g.vertex_properties.pagerank[v] / max_pgr
            for v in self.g.vertices()
        ]

    def add_n(self, node_info):
        n = self.g.add_vertex()
        self.g.vertex_properties.id[n] = node_info['id']
        self.g.vertex_properties.name[n] = node_info['Team_Name']

    def add_l(self, loser, winner, weight):
        n1 = self.g.vertex(loser)
        n2 = self.g.vertex(winner)
        l = self.g.add_edge(n1, n2)
        self.g.edge_properties.weight[l] = weight

    def draw(self, output_file, fmt):
        graph_draw(self.g,
                   vertex_text=self.g.vertex_index,
                   output=output_file,
                   fmt=fmt)

    def save_network(self, file_name):
        try:
            new_file_name = '..' + sep + '..' + sep + 'network-graphs' + sep + file_name
            self.g.save(new_file_name, fmt="gt")
        except:
            return False
        return True

    def vp_pagerank(self):
        return self.g.vertex_properties.pagerank

    def vp_degree_cent(self):
        return self.g.vertex_properties.degree_centrality

    def vp_name(self):
        return self.g.vertex_properties.name

    def vp_id(self):
        return self.g.vertex_properties.id

    def ep_weight(self):
        return self.g.edge_properties.weight

    # Computes the basic characteristics of the network
    def get_basic_info(self):
        info = {}

        try:
            n_vertices = self.g.num_vertices()
            n_edges = self.g.num_edges()
            density = n_edges / ((n_vertices * (n_vertices - 1)) / 2)
            mean_degree = (2 * n_edges) / n_vertices

            # Clustering coefficient computed "by hand", as the mean of the
            # local coefficients calculated by graph-tool
            local_cc = local_clustering(self.g)
            clustering_coef = fsum(
                [local_cc[x] for x in self.g.vertices() if local_cc[x] != 0.0])
            clustering_coef /= n_vertices

            info["Número de times"] = n_vertices
            info["Número de confrontos"] = n_edges
            info["Densidade"] = density
            info["Grau médio"] = mean_degree
            info["Coeficiente de Clusterização"] = clustering_coef
        except Exception:
            info.clear()

        return info

    def degree_centrality(self):
        degree_centrality = self.g.new_vertex_property('float')

        for v in self.g.vertices():
            degree_centrality[v] = v.in_degree() / (self.g.num_vertices() - 1)

        return degree_centrality

    # Computes the degree distribution of the network
    def degree_distribution(self):
        degree_dist = {}

        try:
            for v in self.g.vertices():
                if v.in_degree() not in degree_dist.keys():
                    degree_dist[v.in_degree()] = 1
                else:
                    degree_dist[v.in_degree()] += 1

            for k in degree_dist.keys():
                degree_dist[k] /= self.g.num_vertices()
        except Exception:
            degree_dist.clear()

        return degree_dist
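# Hypothetical usage sketch (not part of the original example). The dictionary
# shapes are inferred from create_network()/add_n(): each node needs 'id' and
# 'Team_Name', each link needs 'winner', 'loser' and 'rounds'.
nodes_info = [
    {'id': 1, 'Team_Name': 'Team A'},
    {'id': 2, 'Team_Name': 'Team B'},
]
links_info = [
    {'winner': 1, 'loser': 2, 'rounds': 16},   # edge weight = 16 / 16 * 100
]

network = Network(nodes_info=nodes_info, links_info=links_info)
print(network.get_basic_info())        # number of teams, matches, density, ...
print(network.vp_pagerank().a)         # raw pagerank value per vertex
network.save_network('example.gt')     # tries to write under ../../network-graphs/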
Esempio n. 38
0
class GeneralGraph():
    """
    General wrapper for graph-tool or networkx graphs to add edges and nodes
    according to constraints
    """
    def __init__(self, directed=True, verbose=1):
        self.graphtool = GRAPH_TOOL
        # Initialize graph
        if self.graphtool:
            self.graph = Graph(directed=directed)
            self.weight = self.graph.new_edge_property("float")
        else:
            if directed:
                print("directed graph")
                self.graph = nx.DiGraph()
            else:
                self.graph = nx.Graph()
        # set metaparameter
        self.time_logs = {}
        self.verbose = verbose

    def set_edge_costs(self,
                       layer_classes=["resistance"],
                       class_weights=[1],
                       **kwargs):
        """
        Initialize edge cost variables
        :param classes: list of cost categories
        :param weights: list of weights for cost categories - must be of same 
                        shape as classes (if None, then equal weighting)
        """
        class_weights = np.array(class_weights)
        # set different costs:
        self.cost_classes = layer_classes
        if self.graphtool:
            self.cost_props = [
                self.graph.new_edge_property("float")
                for _ in range(len(layer_classes))
            ]
        self.cost_weights = class_weights / np.sum(class_weights)
        if self.verbose:
            print(self.cost_classes, self.cost_weights)
        # save weighted instance for plotting
        self.instance = np.sum(
            np.moveaxis(self.cost_instance, 0, -1) * self.cost_weights,
            axis=2) * self.hard_constraints

    def set_shift(self,
                  start,
                  dest,
                  pylon_dist_min=3,
                  pylon_dist_max=5,
                  max_angle=np.pi / 2,
                  **kwargs):
        """
        Initialize shift variable by getting the donut values
        :param lower, upper: min and max distance of pylons
        :param vec: vector of diretion of edges
        :param max_angle: Maximum angle of edges to vec
        """
        vec = dest - start
        if self.verbose:
            print("SHIFT:", pylon_dist_min, pylon_dist_max, vec, max_angle)
        self.shifts = get_half_donut(pylon_dist_min,
                                     pylon_dist_max,
                                     vec,
                                     angle_max=max_angle)
        self.shift_tuples = self.shifts

    def set_corridor(self,
                     dist_surface,
                     start_inds,
                     dest_inds,
                     sample_func="mean",
                     sample_method="simple",
                     factor_or_n_edges=1):
        # set new corridor
        corridor = (dist_surface > 0).astype(int)

        self.factor = factor_or_n_edges
        self.cost_rest = self.cost_instance * (self.hard_constraints >
                                               0).astype(int) * corridor
        # downsample
        tic = time.time()
        if self.factor > 1:
            self.cost_rest = CostUtils.downsample(self.cost_rest,
                                                  self.factor,
                                                  mode=sample_method,
                                                  func=sample_func)

        self.time_logs["downsample"] = round(time.time() - tic, 3)

        # repeat because edge artifacts
        self.cost_rest = self.cost_rest * (self.hard_constraints >
                                           0).astype(int) * corridor

        # add start and end TODO ugly
        self.cost_rest[:, dest_inds[0],
                       dest_inds[1]] = self.cost_instance[:, dest_inds[0],
                                                          dest_inds[1]]
        self.cost_rest[:, start_inds[0],
                       start_inds[1]] = self.cost_instance[:, start_inds[0],
                                                           start_inds[1]]

    def add_nodes(self, nodes):
        """
        Add vertices to the graph
        param nodes: list of node names if networkx, integer if graphtool
        """
        tic = time.time()
        # add nodes to graph
        if self.graphtool:
            _ = self.graph.add_vertex(nodes)
            self.n_nodes = len(list(self.graph.vertices()))
        else:
            self.graph.add_nodes_from(np.arange(nodes))
            self.n_nodes = len(self.graph.nodes())
        # verbose
        if self.verbose:
            print("Added nodes:", nodes, "in time:", time.time() - tic)
        self.time_logs["add_nodes"] = round(time.time() - tic, 3)

    def add_edges(self):
        tic_function = time.time()

        n_edges = 0
        # kernels, posneg = ConstraintUtils.get_kernel(self.shifts,
        # self.shift_vals)
        # edge_array = []

        times_edge_list = []
        times_add_edges = []

        if self.verbose:
            print("n_neighbors:", len(self.shift_tuples))

        for i in range(len(self.shift_tuples)):

            tic_edges = time.time()

            # set cost rest if necessary (random graph)
            self.set_cost_rest()

            # compute shift and weights
            out = self._compute_edges(self.shift_tuples[i])

            # Abort on negative entries because graph-tool crashes on -1 node indices
            if np.any(out[:, :2].flatten() < 0):
                print(np.where(out[:, :2] < 0))
                raise RuntimeError("negative node indices in edge list")

            n_edges += len(out)
            times_edge_list.append(round(time.time() - tic_edges, 3))

            # add edges to graph
            tic_graph = time.time()
            if self.graphtool:
                self.graph.add_edge_list(out, eprops=self.cost_props)
            else:
                nx_edge_list = [(e[0], e[1], {
                    "weight": np.sum(e[2:] * self.cost_weights)
                }) for e in out]
                self.graph.add_edges_from(nx_edge_list)
            times_add_edges.append(round(time.time() - tic_graph, 3))

            # alternative: collect edges here and add them all at once
            # edge_array.append(out)

        # # alternative: add edges all in one go
        # tic_concat = time.time()
        # edge_lists_concat = np.concatenate(edge_array, axis=0)
        # self.time_logs["concatenate"] = round(time.time() - tic_concat, 3)
        # print("time for concatenate:", self.time_logs["concatenate"])
        # tic_graph = time.time()
        # self.graph.add_edge_list(edge_lists_concat, eprops=[self.weight])
        # self.time_logs["add_edges"] = round(
        #     (time.time() - tic_graph) / len(shifts), 3
        # )
        self.n_edges = len(list(self.graph.edges()))
        self._update_time_logs(times_add_edges, times_edge_list, tic_function)
        if self.verbose:
            print("DONE adding", n_edges, "edges:", time.time() - tic_function)

    def _update_time_logs(self, times_add_edges, times_edge_list,
                          tic_function):
        self.time_logs["add_edges"] = round(np.mean(times_add_edges), 3)
        self.time_logs["add_edges_times"] = times_add_edges

        self.time_logs["edge_list"] = round(np.mean(times_edge_list), 3)
        self.time_logs["edge_list_times"] = times_edge_list

        self.time_logs["add_all_edges"] = round(time.time() - tic_function, 3)

        if self.verbose:
            print("Done adding edges:", len(list(self.graph.edges())))

    def sum_costs(self):
        """
        Additive weighting of costs
        Take the individual edge costs, compute weighted sum --> self.weight
        """
        # add sum of all costs
        if not self.graphtool:
            return
        tic = time.time()
        summed_costs_arr = np.zeros(self.cost_props[0].get_array().shape)
        for i in range(len(self.cost_props)):
            prop = self.cost_props[i].get_array()
            summed_costs_arr += prop * self.cost_weights[i]
        self.weight.a = summed_costs_arr

        self.time_logs["sum_of_costs"] = round(time.time() - tic, 3)

    def remove_vertices(self, dist_surface, delete_padding=0):
        """
        Remove the edges in a certain corridor (or all of them) so that they
        can later be replaced by a refined surface

        @param dist_surface: a surface where each pixel value corresponds to
        the distance of that pixel to the shortest path
        @param delete_padding: padding defining in which part of the corridor
        vertices are deleted (not everything can be deleted, otherwise the
        graph becomes disconnected)
        """
        tic = time.time()
        self.graph.clear_edges()
        self.graph.shrink_to_fit()
        self.time_logs["remove_edges"] = round(time.time() - tic, 3)

    def get_pareto(self,
                   vary,
                   source,
                   dest,
                   out_path=None,
                   compare=[0, 1],
                   plot=1):
        """
        Arguments:
            vary: how many weights to explore
                    e.g. 3 --> each cost class can have weight 0, 0.5 or 1
            source, dest: as always the source and destination vertex
            out_path: where to save the pareto figure(s)
            compare: indices of cost classes to compare
        Returns:
            paths: all found paths
            weights: the weight combinations that were explored
            cost_sum: the total (unweighted) path cost for each combination
        """
        tic = time.time()
        # initialize lists
        pareto = list()
        paths = list()
        cost_sum = list()
        # get the edge costs
        cost_arrs = [cost.get_array() for cost in self.cost_props]
        # [self.cost_props[comp].get_array() for comp in compare]

        # get vary weights between 0 and 1
        var_weights = np.around(np.linspace(0, 1, vary), 2)

        # construct weights array
        if len(compare) == 2:
            weights = [[v, 1 - v] for v in var_weights]
        elif len(compare) == 3:
            weights = list()
            for w0 in var_weights:
                for w1 in var_weights[var_weights <= 1 - w0]:
                    weights.append([w0, w1, 1 - w0 - w1])
        else:
            raise ValueError("argument compare can only have length 2 or 3")

        # w_avail: keep weights of non-compare classes, get leftover amount
        w_avail = np.sum(np.asarray(self.cost_weights)[compare])
        # compute paths for each combination of weights
        for j in range(len(weights)):
            # option 2: np.zeros(len(cost_arrs)) + non_compare_weight
            w = self.cost_weights.copy()
            # replace the ones we want to compare
            w[compare] = np.array(weights[j]) * w_avail

            # weighted sum of edge costs
            self.weight.a = np.sum(
                [cost_arrs[i] * w[i] for i in range(len(cost_arrs))], axis=0)
            # get shortest path
            path, path_costs, _ = self.get_shortest_path(source, dest)
            # don't use cost_sum here because it is the sum under the original weighting
            pareto.append(np.sum(path_costs, axis=0)[compare])
            paths.append(path)
            # take overall sum of costs (unweighted) that this w leads to
            cost_sum.append(np.sum(path_costs))

        # print best weighting
        best_weight = np.argmin(cost_sum)
        w = self.cost_weights.copy()
        w[compare] = np.array(weights[best_weight]) * w_avail
        print("Best weights:", w, "with (unweighted) costs:", np.min(cost_sum))

        self.time_logs["pareto"] = round(time.time() - tic, 3)

        pareto = np.array(pareto)
        classes = [self.cost_classes[comp] for comp in compare]
        # Plotting
        if plot:
            if len(compare) == 2:
                plot_pareto_scatter_2d(pareto,
                                       weights,
                                       classes,
                                       cost_sum=cost_sum,
                                       out_path=out_path)
            elif len(compare) == 3:
                # plot_pareto_3d(pareto, weights, classes)
                plot_pareto_scatter_3d(pareto,
                                       weights,
                                       classes,
                                       cost_sum=cost_sum,
                                       out_path=out_path)
        return paths, weights, cost_sum

    def get_shortest_path(self, source, target):
        """
        Compute shortest path from source vertex to target vertex
        """
        tic = time.time()
        # here source and target are given as vertex indices
        if self.graphtool:
            vertices_path, _ = shortest_path(self.graph,
                                             source,
                                             target,
                                             weights=self.weight,
                                             negative_weights=True)
        else:
            try:
                vertices_path = nx.dijkstra_path(self.graph, source, target)
            except nx.exception.NetworkXNoPath:
                return []

        self.time_logs["shortest_path"] = round(time.time() - tic, 3)
        return vertices_path

    def save_graph(self, OUT_PATH):
        """
        Save the graph in OUT_PATH
        """
        if self.graphtool:
            for i, cost_class in enumerate(self.cost_classes):
                self.graph.edge_properties[cost_class] = self.cost_props[i]
            self.graph.edge_properties["weight"] = self.weight
            self.graph.save(OUT_PATH + ".xml.gz")
        else:
            nx.write_weighted_edgelist(self.graph,
                                       OUT_PATH + '.weighted.edgelist')

    def load_graph(self, IN_PATH):
        """
        Retrieve graph from IN_PATH
        """
        if self.graphtool:
            self.g_prev = load_graph(IN_PATH + ".xml.gz")
            self.weight_prev = self.g_prev.ep.weight
            # weight = G2.ep.weight[G2.edge(66, 69)]
        else:
            self.g_prev = nx.read_edgelist(IN_PATH + '.weighted.edgelist',
                                           nodetype=int,
                                           data=(('weight', float), ))

    # -----------------------------------------------------------------------
    # INTERFACE

    def single_sp(self, **kwargs):
        """
        Function for full processing until shortest path
        """
        self.start_inds = kwargs["start_inds"]
        self.dest_inds = kwargs["dest_inds"]
        self.set_shift(self.start_inds, self.dest_inds, **kwargs)
        # self.set_corridor(
        #     np.ones(self.hard_constraints.shape) * 0.5,
        #     self.start_inds,
        #     self.dest_inds,
        #     factor_or_n_edges=1
        # )
        if self.verbose:
            print("1) Initialize shifts and instance (corridor)")
        self.set_edge_costs(**kwargs)
        # add vertices
        self.add_nodes()
        if self.verbose:
            print("2) Initialize distances to inf and predecessors")
        self.add_edges()
        if self.verbose:
            print("3) Compute source shortest path tree")
            print("number of vertices and edges:", self.n_nodes, self.n_edges)

        # weighted sum of all costs
        self.sum_costs()
        source_v, target_v = self.add_start_and_dest(self.start_inds,
                                                     self.dest_inds)
        # get actual best path
        path, path_costs, cost_sum = self.get_shortest_path(source_v, target_v)
        if self.verbose:
            print("4) shortest path", cost_sum)
        return path, path_costs, cost_sum
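# Standalone, hypothetical sketch (toy data) of the pattern behind
# set_edge_costs()/sum_costs() above: one float edge property per cost class,
# filled from the extra columns of add_edge_list(), then combined into a
# single weight by a weighted sum over the property arrays.
import numpy as np
from graph_tool.all import Graph

toy = Graph(directed=True)
toy.add_vertex(3)
cost_a = toy.new_edge_property("float")
cost_b = toy.new_edge_property("float")
toy.add_edge_list([(0, 1, 1.0, 4.0), (1, 2, 2.0, 0.5)],
                  eprops=[cost_a, cost_b])

class_weights = np.array([0.75, 0.25])       # normalized cost-class weights
weight = toy.new_edge_property("float")
weight.a = (cost_a.get_array() * class_weights[0]
            + cost_b.get_array() * class_weights[1])
print(weight.a)                              # [1.75, 1.625]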
Esempio n. 39
0
def gen_graph(repo_events):
    # unpack the (repo, events) tuple argument (Python 3 no longer allows
    # tuple parameters in the function signature)
    repo, events = repo_events
    graph = Graph()

    repo_on_graph = graph.new_graph_property('string')
    repo_on_graph[graph] = repo
    graph.graph_properties['repo_on_graph'] = repo_on_graph

    language_on_graph = graph.new_graph_property('string')
    language_on_graph[graph] = events[0]['language']
    graph.graph_properties['language_on_graph'] = language_on_graph

    events_on_vertices = graph.new_vertex_property('object')
    graph.vertex_properties['events_on_vertices'] = events_on_vertices

    actors_on_vertices = graph.new_vertex_property('string')
    graph.vertex_properties['actors_on_vertices'] = actors_on_vertices

    weights_on_edges = graph.new_edge_property('long double')
    graph.edge_properties['weights_on_edges'] = weights_on_edges

    # pre_vertices = []
    pre_events_map = {}
    pre_vertices_map = {}

    owner_vertex = graph.add_vertex()
    owner = repo.split('/')[0]
    dummy_event = {'created_at': events[0]['repo-created_at']}
    actors_on_vertices[owner_vertex] = owner
    events_on_vertices[owner_vertex] = dummy_event
    pre_vertices_map[owner] = owner_vertex
    pre_events_map[owner] = dummy_event

    events = sorted(events, key=lambda x: x['created_at'])

    for event in events:
        actor = event['actor']

        if actor in pre_events_map:
            continue

        created_at = event['created_at']

        vertex = graph.add_vertex()
        events_on_vertices[vertex] = event
        actors_on_vertices[vertex] = actor

        # if 'actor-following' not in event:
        #     continue

        following = set(event['actor-following'])
        commons = following.intersection(pre_vertices_map.keys())

        # pre_vertices.append(vertex)

        # if len(commons) == 0:
        #     edge = graph.add_edge(vertex, owner_vertex)
        #     weights_on_edges[edge] = 1.0

        for pre_actor in commons:

            interval =\
                (created_at - pre_events_map[pre_actor]['created_at']).days

            if interval < 0:
                continue

            edge = graph.add_edge(vertex, pre_vertices_map[pre_actor])

            if pre_actor == owner:
                weight = 1.0
            else:
                weight = 1.0 / fib(interval + 2)

            weights_on_edges[edge] = weight

        pre_events_map[actor] = event
        pre_vertices_map[actor] = vertex

    return graph
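# gen_graph() above weights edges by 1.0 / fib(interval + 2), but the fib()
# helper is not shown in this excerpt. A minimal memoized version it could
# plausibly correspond to (an assumption, not the original implementation):
from functools import lru_cache

@lru_cache(maxsize=None)
def fib(n):
    """Return the n-th Fibonacci number (fib(0) == 0, fib(1) == 1)."""
    if n < 2:
        return n
    return fib(n - 1) + fib(n - 2)

# With this definition an interval of 0 days gives weight 1.0 / fib(2) == 1.0,
# and the weight decays quickly as the interval between events grows.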
Esempio n. 40
0
data = client.get_collection_view('[database page link]')

graph = Graph(directed=False)

# Dictionary from entry id to vertex id to keep track of vertices that have been added
entries = {}

vertex_names = graph.new_vertex_property('string')

# For every table entry
for row in data.collection.get_rows():

    # If this entry is not in the graph, add it
    if row.id not in entries:
        entries[row.id] = graph.add_vertex()
        vertex_names[entries[row.id]] = row.title

    # Add the links of the current entry to the graph
    for child in row.children:
        if isinstance(child, CollectionRowBlock):

            if child.id not in entries:
                entries[child.id] = graph.add_vertex()
                vertex_names[entries[child.id]] = child.title

            graph.add_edge(entries[row.id], entries[child.id])

graph_draw(graph,
           vertex_text=vertex_names,
           vertex_font_size=8)
Esempio n. 41
0
class TTC(AbstractMatchingAlgorithm):
    """This class searches for cycles where each student gets his best option.

    This takes a list of students, a list of schools and a ruleset
    (which is used to calculate priorities).
    This works by generating a directed graph, where each student points
    at his best option, and each school points at the student (or students)
    with the highest priority.
    """

    EDGE_WIDTH_SIZE_FACTOR = 700
    """Size factor (in the image) of each edge that is not part of the main cycle."""
    EDGE_WIDTH_CYCLE_SIZE = 10
    """Size factor (in the image) of each edge that takes part of the main cycle."""
    def __init__(self,
                 generate_images=False,
                 images_folder="TTC_images",
                 use_longest_cycle=True):
        """Initializes the algorithm.

        :param generate_images: If the process generates images or not.
        :type generate_images: bool
        :param images_folder: Where images are saved.
        :type images_folder: str
        :param use_longest_cycle: If the algorithm applies the longest cycle available, or the first one encountered.
        :type use_longest_cycle: bool
        """
        self.generate_images = generate_images
        self.images_folder = images_folder
        self.use_longest_cycle = use_longest_cycle

        self.__graph = None
        self.__vertices_by_school_id = None
        self.__vertices_by_student_id = None
        self.__students_by_id = None
        self.__schools_by_id = None

        self.__entity_id = None
        self.__entity_type = None

    def reset_variables(self):
        """Resets all variables."""
        self.__graph = Graph()
        self.__vertices_by_school_id = {}
        self.__vertices_by_student_id = {}
        self.__students_by_id = {}
        self.__schools_by_id = {}

        self.__entity_id = self.__graph.new_vertex_property("int")
        self.__graph.vertex_properties["entity_id"] = self.__entity_id

        self.__entity_type = self.__graph.new_vertex_property("string")
        self.__graph.vertex_properties["entity_type"] = self.__entity_type

    def run(self, students, schools, ruleset):
        """Runs the algorithm.
        First it creates the graph, then it lists all the cycles available,
        after that it selects one cycle, and applies it. Finally, it starts
        the process again.

        :param students: List of students.
        :type students: list
        :param schools: List of schools.
        :type schools: list
        :param ruleset: Set of rules used.
        :type ruleset: Ruleset
        """
        self.reset_variables()

        can_improve = True
        iteration_counter = 1

        while can_improve:

            self.structure_graph(students, schools)

            cycles = [c for c in all_circuits(self.__graph, unique=True)]
            # print("CYCLES", cycles, "iteration", iteration_counter)

            cycle_edges = []

            if cycles:
                for cycle in cycles:  # ToDo: Possible optimisation: apply all disjoint cycles at once
                    for current_v_index in range(len(cycle)):
                        next_v_index = (current_v_index + 1) % len(cycle)

                        from_v = self.__graph.vertex(cycle[current_v_index])
                        target_v = self.__graph.vertex(cycle[next_v_index])
                        edge = self.__graph.edge(from_v, target_v)
                        cycle_edges.append(edge)

                        if self.__entity_type[from_v] == "st":
                            sel_student = self.__students_by_id[
                                self.__entity_id[from_v]]
                            sel_school = self.__schools_by_id[
                                self.__entity_id[target_v]]
                            sel_student.assigned_school = sel_school
                            sel_school.assignation.append(sel_student)

                        # vertex_school_target_id = self.__entity_id[target_v]
                        # vertex_school_target = self.__schools_by_id[vertex_school_target_id]

                        # print("CYCLE: Student", sel_student.id, "School", sel_school.id)

                        # print("VVV: School {} -> School {}    (Student {}) ".format(self.__entity_id[from_v], self.__entity_id[target_v], self.__entity_id[self.__graph.edge(from_v, target_v)]))

                    if self.generate_images:
                        self.generate_image(cycle_edges,
                                            iteration_n=iteration_counter)
            else:
                can_improve = False

            self.__graph.clear()
            iteration_counter += 1

    def structure_graph(self, students, schools):
        """Creates a graph where students points to schools, and schools points to students.

        In the graph, each student points at his best option, and each school points
        at the student (or students) with the highest priority.

        :param students: List of students.
        :type students: list
        :param schools: List of schools.
        :type schools: list
        """
        if not self.__students_by_id and not self.__schools_by_id:
            for student in students:
                self.__students_by_id[student.id] = student
            for school in schools:
                self.__schools_by_id[school.id] = school

        for school in schools:
            setattr(school, 'preferences',
                    StudentQueue(school, preference_mode=True))

        remaining_students = [
            student for student in students if not student.assigned_school
        ]

        for student in remaining_students:
            for pref_school in student.preferences:
                pref_school.preferences.append(student)

        for student in remaining_students:
            v_source_student = self.create_vertex_student(student)

            pref_school = next(
                (school for school in student.preferences if
                 len(school.assignation.get_all_students()) < school.capacity),
                None)

            if pref_school:
                v_target_school = self.create_vertex_school(pref_school)
                self.create_edge(v_source_student, v_target_school)

        for school in schools:
            if len(school.assignation.get_all_students()) < school.capacity:
                v_source_school = self.create_vertex_school(school)

                pref_student = next(
                    iter(school.preferences.get_all_students()), None)

                if pref_student:
                    v_target_student = self.create_vertex_student(pref_student)
                    self.create_edge(v_source_school, v_target_student)

        # graph_draw(self.__graph,
        #            vertex_text=self.__entity_id, vertex_shape="circle",
        #            output_size=(1000, 1000), bg_color=[1., 1., 1., 1], output="graph.png")

    def create_vertex_student(self, student):
        """Defines a new student as a vertex in the graph (if it did not existed before)."""
        if student.id in self.__vertices_by_student_id:
            vertex = self.__vertices_by_student_id[student.id]
        else:
            vertex = self.__graph.add_vertex()
            self.__vertices_by_student_id[student.id] = vertex
            self.__entity_id[vertex] = student.id
            self.__entity_type[vertex] = "st"  # ToDo: There may be other ways to do this.
        return vertex

    def create_vertex_school(self, school):
        """Defines a new school as a vertex in the graph (if it did not existed before)."""
        if school.id in self.__vertices_by_school_id:
            vertex = self.__vertices_by_school_id[school.id]
        else:
            vertex = self.__graph.add_vertex()
            self.__vertices_by_school_id[school.id] = vertex
            self.__entity_id[vertex] = school.id
            self.__entity_type[vertex] = "sc"
        return vertex

    def create_edge(self, source_v, target_v):
        """Creates a directed edge between two vertices."""
        self.__graph.add_edge(source_v, target_v)

    def generate_image(self, cycle_edges, iteration_n=0):
        """Generates an image of a graph.

        :param cycle_edges: Edges which are part of the main cycle (they will be highlighted in red).
        :type cycle_edges: list
        :param iteration_n: Number of iteration of the algorithm (this is added in the filename of the image).
        :type iteration_n: int

        .. DANGER::
           This is an experimental feature.
        """
        edge_color = self.__graph.new_edge_property("vector<float>")
        edge_width = self.__graph.new_edge_property("int")

        for edge in self.__graph.edges():
            if edge in cycle_edges:
                edge_color[edge] = [1., 0.2, 0.2, 0.999]
                edge_width[edge] = 7
            else:
                edge_color[edge] = [0., 0., 0., 0.3]
                edge_width[edge] = 4

        vertex_shape = self.__graph.new_vertex_property("string")
        vertex_size = self.__graph.new_vertex_property("int")

        for vertex in self.__graph.vertices():
            if self.__entity_type[vertex] == "st":
                vertex_shape[vertex] = "circle"
                vertex_size[vertex] = 1
            else:
                vertex_shape[vertex] = "double_circle"
                vertex_size[vertex] = 100

        # pos = sfdp_layout(self.__graph, C=10, p=5, theta=2, gamma=1)
        pos = arf_layout(self.__graph, d=0.2, a=3)
        graph_draw(
            self.__graph,
            pos=pos,
            vertex_text=self.__entity_id,
            vertex_font_size=1,  # ToDo: Move image related code outside the class.
            vertex_fill_color=[0.97, 0.97, 0.97, 1],
            vertex_color=[0.05, 0.05, 0.05, 0.95],
            vertex_shape=vertex_shape,
            edge_color=edge_color,
            edge_pen_width=edge_width,
            output_size=(1000, 1000),
            bg_color=[1., 1., 1., 1],
            output=self.generate_filename(iteration_n))

    def generate_filename(self, iteration_n):  # ToDo: Move this to utils
        """Returns a filename (which is used to generate the images)."""
        filename = "Graph (iteration {})".format(
            iteration_n) if iteration_n > 0 else "Graph"
        output_file = gen_filepath(self.images_folder,
                                   filename=filename,
                                   extension="png")
        return output_file
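# Standalone, hypothetical sketch of the cycle enumeration that TTC.run()
# relies on: all_circuits() yields each circuit as an array of vertex indices
# (toy graph, assumed data).
from graph_tool.all import Graph, all_circuits

toy = Graph(directed=True)
toy.add_vertex(3)
toy.add_edge_list([(0, 1), (1, 0), (1, 2), (2, 1)])

for cycle in all_circuits(toy, unique=True):
    print(cycle)        # e.g. [0 1] then [1 2]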