Code example #1
    def subgraphIsomorphismCheck(self, G1, G2):
        """
        Checks whether G1 contains a subgraph isomorphic to G2, using the Whitney isomorphism theorem.

        Parameters:
        G1 (NetworkX graph): The bigger graph.
        G2 (NetworkX graph): The smaller graph.

        Returns:
        bool: True if G1 contains a subgraph isomorphic to G2.
        isomorphism.GraphMatcher: The graph matcher object used for the check.
        """
        # transform the graphs into line graphs and check for subgraph isomorphism
        # isomorphism.GraphMatcher tries to find an induced subgraph of G1 that is isomorphic to G2,
        # so if G2 is only a non-induced subgraph of G1, the algorithm returns False
        GM = isomorphism.GraphMatcher(nx.line_graph(G1), nx.line_graph(G2))
        subgraph_is_iso = GM.subgraph_is_isomorphic()

        # check for exceptions
        # e.g. line graphs of K_3 triangle graph and K_1,3 claw graph are isomorphic, but the original graphs are not
        if subgraph_is_iso:
            edgeListG1 = []
            edgeListG2 = []

            for edgeMaping in GM.mapping.items():
                edgeListG1.append(edgeMaping[0])
                edgeListG2.append(edgeMaping[1])

            # construct the graphs the algorithm thinks are isomorphic and verify them with a quick isomorphism.is_isomorphic check
            testG1 = nx.Graph(edgeListG1)
            testG2 = nx.Graph(edgeListG2)
            subgraph_is_iso = isomorphism.is_isomorphic(testG1, testG2)

        return subgraph_is_iso, GM
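
The exception the comments refer to follows from Whitney's theorem: the triangle K_3 and the claw K_{1,3} are the only pair of connected, non-isomorphic graphs with isomorphic line graphs, which is why the extra check above is needed. A minimal sketch reproducing it (graph names are illustrative):

import networkx as nx

K3 = nx.complete_graph(3)    # triangle
claw = nx.star_graph(3)      # K_{1,3}
# both line graphs are triangles, so comparing line graphs alone cannot tell the originals apart
print(nx.is_isomorphic(nx.line_graph(K3), nx.line_graph(claw)))  # True
print(nx.is_isomorphic(K3, claw))                                # False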
Code example #2
def centrality(file_graphml, road_type, place_country, bc=False, cc=False):
    # load graph from the GraphML file
    grafo = ox.load_graphml(file_graphml)
    #### replace "length" values with "cost" values #####
    # for u, v, key, attr in grafo.edges(keys=True, data=True):
    #     # print(attr)
    #     print(attr["length"])
    #     attr['length'] = attr.get("cost")
    #     # grafo.add_edge(u, v, key, attr_dict=attr)
    #     grafo.add_edge(u, v, key)
    # ox.extended_stats(grafo, bc=True)
    if cc:
        c_name = "close_centrality"
        edge_centrality = nx.closeness_centrality(nx.line_graph(grafo))
    if bc:
        c_name = "btw_centrality"
        edge_centrality = nx.betweenness_centrality(nx.line_graph(grafo), weight='pippo')  # not working: 'pippo' is a placeholder attribute the line-graph edges do not have, so the weight is ignored
    ev = [edge_centrality[edge + (0,)] for edge in grafo.edges()]
    # color scale converted to list of colors for graph edges
    norm = colors.Normalize(vmin=min(ev)*0.8, vmax=max(ev))
    # cividis, viridis, YlGn (good colormaps)
    # 'Greys', 'Purples', 'Blues', 'Greens', 'Oranges', 'Reds',
    #             'YlOrBr', 'YlOrRd', 'OrRd', 'PuRd', 'RdPu', 'BuPu',
    #             'GnBu', 'PuBu', 'YlGnBu', 'PuBuGn', 'BuGn', 'YlGn',
    #             'viridis', 'plasma', 'inferno', 'magma', 'cividis']
    cmap = cm.ScalarMappable(norm=norm, cmap=cm.YlGn)
    ec = [cmap.to_rgba(cl) for cl in ev]
    fig, ax = ox.plot_graph(grafo, bgcolor='k', axis_off=True, node_size=0, node_color='w',
                            node_edgecolor='gray', node_zorder=2,
                            edge_color=ec, edge_linewidth=1.5, edge_alpha=1)

    gdf_edges = ox.graph_to_gdfs(grafo, nodes=False, fill_edge_geometry=True)
    gdf_edges['edge_color'] = ec

    my_map = plot_graph_folium_FK(gdf_edges, graph_map=None, popup_attribute=None,
                            zoom=1, fit_bounds=True, edge_width=2, edge_opacity=1) #tiles='cartodbpositron'
    name_place_country = re.sub('[/," ",:]', '_', place_country)
    road_type = road_type.replace(' ', '')
    road_type = list(road_type.split(","))
    roadtype = ' '.join([str(elem) for elem in road_type])
    roads = re.sub('[/," ",:]', '_', roadtype)
    my_map.save(c_name + "_" + roads + "_" + name_place_country + ".html")

#############################################################
#############################################################


# road_type = "motorway, motorway_link"
# # road_type = "secondary"
# # road_type = "motorway, motorway_link, secondary, primary, tertiary"
# place_country = "Catania, Italy"
# # file_graphml = 'Catania__Italy.graphml'  # to be used when run cost assignment and shortest path calculation (no saving!!)
# file_graphml = 'Catania__Italy_cost.graphml' # to be used when run folium map classification and centrality
# distance = 20000
# bc=False
# cc=True
Code example #3
File: help.py  Project: predmach/DeFiNe
def partition_graph(aa,bb,gg):

    E=gg.number_of_edges()
    gl=nx.line_graph(gg)
    lbl={}
    for i,g in enumerate(gg.edges()):
        lbl[g]=i
    gl=nx.relabel_nodes(gl,lbl)

    ad=dict(nx.all_pairs_shortest_path_length(gl))  # dict() needed for networkx >= 2.0, where this returns an iterator
    am=np.zeros((E,E))
    for ni,n in enumerate(gl.nodes()):
        for mi,m in enumerate(gl.nodes()):
            if(n in ad.keys()):
                if(m in ad[n].keys()):
                    am[ni,mi]=ad[n][m]
                    am[mi,ni]=ad[n][m]
    M=int(am.max()+0.5)+1
    am[am==0]=M
    hh=np.zeros((M,2,2))
    for e0 in range(E):
        for e1 in range(e0):
            xa=np.max([(e0 in i)+(e1 in i) for i in aa])
            xb=np.max([(e0 in i)+(e1 in i) for i in bb])
            di=int(am[e0,e1])-1
            if(xa==2 and xb==2):
                hh[di,0,0]+=1.0
            elif(xa<2 and xb==2):
                hh[di,1,0]+=1.0
            elif(xa==2 and xb<2):
                hh[di,0,1]+=1.0
            elif(xa<2 and xb<2):
                hh[di,1,1]+=1.0

    return hh
Code example #4
File: utils.py  Project: hjdddzaa/visions
def get_contains_cases(_test_suite, _series_map: Dict[Type[VisionsBaseType],
                                                      Set[str]], typeset):
    """Parametrize contains tests

    Args:
        _test_suite: mapping from sequence identifiers to sequences
        _series_map: mapping from type to a set of series identifiers
        typeset: a VisionsTypeset

    Returns:
        the args for the generated tests
    """

    # Include children's series in parent
    reversed_topological_edge_sort = list(
        reversed(list(nx.topological_sort(nx.line_graph(typeset.base_graph)))))
    for parent, child in reversed_topological_edge_sort:
        _series_map[parent] |= _series_map[child]

    all_series_included(_test_suite, _series_map)

    argsvalues = []
    for item in _test_suite:
        for type, series_list in _series_map.items():
            args = {"id": f"{item.name} x {type}"}

            member = item.name in series_list
            argsvalues.append(pytest.param(item, type, member, **args))

    return {"argnames": "series,type,member", "argvalues": argsvalues}
Code example #5
def max_span_trail(D, G, input_arc, cache, reactions):
    """Given a directed graph D and an undirected graph G built on the same
    vertex set and an input arc in D, returns a trail of maximum span in D that
    passes through the input arc, such that its vertex set induces a connected
    subgraph in G.

    The cache dict is modified, storing best partial paths in every SCC of LD
    between all possible pairs of entry and exit points from predecessor SCCs to
    successor SCCs.

    :param D: input directed graph for the HNet algorithm, with the same vertex
        set as G and representing a metabolic pathway
    :param G: input undirected graph for the HNet algorithm, with the same
        vertex set as D and representing gene neighborhood (in terms of
        reactions; see Model in the methods section of Zaharia et al., 2018)
    :param input_arc: input arc in D on which the HNet algorithm is executed
    :param cache: dict storing best partial paths for every strongly connected
        component (SCC) of the line graph of D
    :param reactions: dict of dicts storing reaction information (obtained by
        parsing a KGML file)
    :return: a trail of maximum span in D such that its vertex set induces a
        connected subgraph in G
    """
    LD = nx.line_graph(D)
    if nx.number_of_nodes(LD) == 0:
        return list()

    access = AccessPoints(LD)  # access points for every SCC in the line graph
    partial_paths(LD, access, cache, reactions)

    return get_corresponding_trail(
        max_span_path(LD, G, input_arc, cache, reactions))
Code example #6
 def _complete_summaries(self):
     for from_type, to_type in nx.topological_sort(
         nx.line_graph(self.typeset.base_graph)
     ):
         self.summary_map[to_type] = (
             self.summary_map[from_type] + self.summary_map[to_type]
         )
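
This works because the line graph of a DAG is itself a DAG whose nodes are the original edges, so a topological sort of the line graph visits the edges of typeset.base_graph in dependency order. A minimal sketch with an illustrative three-edge DAG:

import networkx as nx

dag = nx.DiGraph([("Generic", "Numeric"), ("Numeric", "Integer"), ("Numeric", "Float")])
edge_order = list(nx.topological_sort(nx.line_graph(dag)))
# e.g. [('Generic', 'Numeric'), ('Numeric', 'Integer'), ('Numeric', 'Float')]
for from_type, to_type in edge_order:
    pass  # summaries would be propagated from from_type to to_type here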
Code example #7
 def vertexes_to_edges_graph(self, curr_path, v_graph):
     """
     Description:
     Converts vertex to edges in a graph.
     Input:
     curr_path - path of a current graphml file
     v_graph - the vertex graph
     Output:
     e_graph - edges graph.
     """
     xmldoc = minidom.parse(curr_path)
     itemlist = xmldoc.getElementsByTagName('edge')
     e_graph = nx.line_graph(v_graph)
     if itemlist[0].firstChild==None:
         ds=nx.degree_centrality(v_graph)
         nx.set_node_attributes(v_graph,ds,'label')
         return v_graph
     for item in itemlist:
         value = float(item.firstChild.data)  # text of the edge element (TEXT_NODE is a node-type constant, not the value)
         source = item.attributes['source'].value
         target = item.attributes['target'].value
         if (source, target) not in e_graph.nodes.data():
             e_graph.add_node((source, target))
         e_graph.nodes.data()[(source, target)].update({'label': value})
     [x[1].update({'value': 0}) for x in e_graph.nodes.data() if len(x[1]) == 0]
     return e_graph
Code example #8
    def __init__(self, g, features, n_machines, radius, activation, device):
        super().__init__()
        adj = nx.adj_matrix(g)
        p = my.adj2p(sp.sparse.triu(adj))
        adj = tf.cast(my.sparse_sp2tf(adj), tf.float32)
        deg = tf.expand_dims(tf.sparse_reduce_sum(adj, 1), 1)

        lg = nx.line_graph(g)
        adj_lg = tf.cast(my.sparse_sp2tf(nx.adj_matrix(lg)), tf.float32)
        deg_lg = tf.expand_dims(tf.sparse_reduce_sum(adj_lg, 1), 1)

        for i, (m, n) in enumerate(zip(features[:-1], features[1:])):
            setattr(
                self, 'layer%d' % i,
                GNNLayer(m, n, adj, deg, adj_lg, deg_lg, p, radius,
                         activation))

        self.n_layers = i + 1

        self.dense = tf.keras.layers.Dense(input_shape=(n, ), units=n_machines)

        self.device = device

        with tf.device(device):
            x = deg
            x -= tf.reduce_mean(x)
            x /= tf.sqrt(tf.reduce_mean(tf.square(x)))

            y = deg_lg
            y -= tf.reduce_mean(y)
            y /= tf.sqrt(tf.reduce_mean(tf.square(y)))

            self.x, self.y = x, y
Code example #9
def lineDigraph(n, temp):  # returns the line digraph L(G) of a multidigraph
    # multigraph
    V = range(1, n + 1)
    G = nx.MultiDiGraph()
    G.add_nodes_from(V)
    for (i, j) in temp:
        if (temp[i, j] > 0):
            for k in range(0, temp[i, j]):
                G.add_edge(i, j)
    # LineDigraph of G
    L = nx.line_graph(G)
    # adjacency matrix: L.edges() → m
    n = len(L.nodes())
    v = {}
    atai = 1
    for i in L.nodes():
        v[i] = atai
        atai += 1
    m = {}
    for i in range(1, n + 1):
        for j in range(1, n + 1):
            m[i, j] = 0
    for (i, j) in L.edges():
        m[v[i], v[j]] = 1
    return n, m
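
If only the adjacency matrix of the line digraph is needed, networkx can build it directly instead of filling a dict by hand; a minimal sketch (the arc list is illustrative):

import networkx as nx

G = nx.MultiDiGraph([(1, 2), (1, 2), (2, 3)])   # illustrative multidigraph
L = nx.line_graph(G)                            # nodes are arcs (u, v, key)
A = nx.to_numpy_array(L, nodelist=sorted(L.nodes()))  # 0/1 adjacency matrix of L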
Code example #10
File: planner_indoor.py  Project: jeguzzi/resilience
def line_graph(layer,
               target_state,
               hidden_nodes=None,
               not_traversable_states=[]):
    graph, hidden_nodes = _traversability_graph(
        layer,
        hidden_nodes=hidden_nodes,
        not_traversable_states=not_traversable_states)
    line_graph = nx.line_graph(graph)
    line_graph.edges_in_state = collections.defaultdict(list)
    for e in line_graph.edges(data=True):
        n1, n2, data = e
        (_, so, ti1) = n1
        (_, _, ti2) = n2
        s = _state_for_line_graph_edge(e)
        p1 = layer.transitions[ti1].geometry.centroid
        p2 = layer.transitions[ti2].geometry.centroid
        data['length'] = p1.distance(p2)
        line_graph.edges_in_state[s].append(e[:2])
    target = _add_to_line_graph(layer, graph, line_graph, to_node=target_state)
    hidden_state = [line_graph.edges_in_state[node] for node in hidden_nodes]
    for n, data in line_graph.nodes(data=True):
        data['observe'] = []
    for i, edges in enumerate(hidden_state):
        for x, y in edges:
            line_graph[x][y][0]['hidden'] = True
            line_graph.node[x]['observe'].append(i)
            line_graph.node[y]['observe'].append(i)
    return graph, nx.Graph(line_graph), target, hidden_state
Code example #11
def line_graphs():
    print("Line graph")
    G = nx.star_graph(3)
    L = nx.line_graph(G)
    print(sorted(map(sorted, L.edges())))  # makes a 3-clique, K3
    draw_graph(G)
    draw_graph(L)
Code example #12
File: tests.py  Project: RobertJClarke/Berge-Graphs
def test_is_berge(pool : Pool, alt=False):
    
    if alt:
        from alternate import is_berge_alt as is_berge
    else:
        from berge import is_berge

    # Note that graphs are perfect iff they are Berge

    for i in range(5):
        # Bipartite graphs are always Berge

        n1, n2 = random.randint(1, 12), random.randint(1, 12)
        
        graph = random_bipartite(n1=n1, n2=n2, p=.4)

        assert(is_berge(graph, pool=pool))

    for i in range(5):
        graph = nx.line_graph(random_bipartite(n1=10, n2=10, p=.15))

        # Line graphs of bipartite graphs are perfect by Konig's theorem

        assert(is_berge(graph, pool=pool))

    for i in range(10, 15):
        assert(is_berge(nx.complete_graph(i),  pool=pool))

    for i in range(5):

        # Make sure we work properly on disconnected graphs

        graph = nx.disjoint_union_all([
            random_bipartite(
                random.randint(1, 6), 
                random.randint(1, 6), .2) 
            for i in range(3)])

        assert(is_berge(graph,  pool=pool))

    for i in range(5):
        m = random.randint(2, 12)

        graph = nx.triangular_lattice_graph(m, 2)

        assert(is_berge(graph, pool=pool))

    for i in range(5):
        n = random.randint(4, 20)

        graph = random_chordal(n, .2)

        assert(is_berge(graph, pool=pool))

    for i in range(10):
        n = random.randint(4, 20)

        graph = nx.cycle_graph(n)

        assert(is_berge(graph, pool=pool) == (n % 2 == 0))
Code example #13
def get_contains_cases(
    _test_suite: Dict[str, Sequence],
    _series_map: Dict[T, Set[str]],
    typeset: VisionsTypeset,
):
    """Parametrize contains tests

    Args:
        _test_suite: mapping from sequence identifiers to sequences
        _series_map: mapping from type to a set of sequence identifiers
        typeset: A VisionsTypeset

    Returns:
        the args for the generated tests
    """

    # Include children's series in parent
    reversed_topological_edge_sort = list(
        reversed(list(nx.topological_sort(nx.line_graph(typeset.base_graph))))
    )
    for parent, child in reversed_topological_edge_sort:
        _series_map[parent] |= _series_map[child]

    all_series_included(_test_suite, _series_map)

    argsvalues = []
    for name, item in _test_suite.items():
        for type, series_list in _series_map.items():
            args = {"id": f"{name} x {type}"}

            member = name in series_list
            argsvalues.append(pytest.param(name, item, type, member, **args))

    return {"argnames": ["name", "series", "type", "member"], "argvalues": argsvalues}
Code example #14
File: utils.py  Project: xiawenwen49/GNPP_code
 def __init__(self, root, N=100, deg=2, p=0.3, seed=123):
     """
     Args:
         # G: network topology
         root: dir for save G and edge_timestamps
         N: original graph, with N nodes
         deg: original graph, with average degree=deg.
             The original graph should have N nodes, N*deg edges.
             For timestamp simulation, the number of 'nodes' should be N*deg. Because we regard each edge of the original graph as one event 'type'.
             But we share the same model for all edges.
             And the 'adjacency' for tick should be [N*deg, N*deg]
         p: rewire probability
         seed: random seed
     """
     self.root = root
     self.N = N
     self.deg = deg
     self.p = p
     self.seed = seed
     self.G = self.generate_G(self.N, self.deg, self.p, seed=self.seed)
     self.G_e2n = nx.line_graph(self.G)
     self.elabel_idx_map = dict(
         zip(self.G_e2n.nodes(), range(self.G_e2n.number_of_nodes())))
     self.idx_elabel_map = dict(
         zip(range(self.G_e2n.number_of_nodes()), self.G_e2n.nodes()))
     self.hawkes = []
Code example #15
    def to_graph(self, personalization=None):
        ''' builds document graph from several link types '''
        svos = self.svos
        g = nx.DiGraph()
        for e in self.to_edges():
            f, t = e
            g.add_edge(f, t)
        if self.params.svo_edges:
            for s, v, o in svos:
                if s == o: continue
                if v == 'as_in':
                    g.add_edge(s, o)
                else:
                    g.add_edge(o, s)

        if personalization == None and self.params.pers_idf:
            personalization = self.pers_from_freq(get_freqs())

        pr = nx.pagerank(g, personalization=personalization)
        if self.params.use_line_graph and g.number_of_edges() < 20000:
            lg = nx.line_graph(g)
            lpr = nx.pagerank(lg)
            for xy, r in lpr.items():
                x, y = xy
                if isinstance(x, str) and isinstance(y, str):
                    pr[x] = pr[x] + r
                    pr[y] = pr[y] + r
        return g, pr
Code example #16
def to_line_graph(data: Data, directed: bool = True) -> Data:
    """
    Convert a graph G to its corresponding line-graph L(G)
    Args:
        data: a torch_geometric Data object representing a graph
        directed: whether the original graph is directed or undirected
    """
    original_edge_attrs = data.edge_attr
    original_edge_names = [
        (from_.item(), to_.item())
        for from_, to_ in zip(data.edge_index[0, :], data.edge_index[1, :])
    ]
    original_edge_to_attr = {
        e: attr
        for e, attr in zip(original_edge_names, original_edge_attrs)
    }
    ctor = nx.DiGraph if directed else nx.Graph
    G = to_networkx(data,
                    node_attrs=['x'],
                    edge_attrs=['edge_attr'],
                    to_undirected=not directed)
    line_graph = nx.line_graph(G, create_using=ctor)
    res_data = from_networkx(line_graph)

    # Copy the original edge attributes onto the line-graph nodes
    res_data.x = torch.stack(
        [original_edge_to_attr[e] for e in line_graph.nodes])
    res_data.y = data.y
    return res_data
Code example #17
 def __init__(self, mol, diameter=8, ignore_hydrogen=True, timeout=5):
     mol.require("Valence")
     self.diam = diameter
     self.ignoreh = ignore_hydrogen
     self.timeout = timeout
     # Results
     self.array = []
     self.max_size = 0
     self.int_to_node = {}
     self.elapsed_time = 0
     self.valid = False
     if len(mol) < 3:
         return
     start_time = time.perf_counter()
     self.mol = mol
     self.preprocess()
     # Generate line graph and reindexing
     lg = nx.line_graph(self.mol.graph)
     node_to_int = {}
     for i, ln in enumerate(lg.nodes()):
         node_to_int[ln] = i
         lg.nodes[ln]["type"] = self.node_desc(ln)
     self.int_to_node = {v: k for k, v in node_to_int.items()}
     self.graph = nx.relabel_nodes(lg, node_to_int)
     # Edges
     edges = []
     for u, v, attr in self.edge_gen():
         edges.append((u, v))
         self.array.append((u, v, self.edge_desc(attr)))
     # Max fragment size determination
     fcres = find_cliques(self.graph.nodes(), edges, timeout=timeout)
     self.max_size = len(fcres["max_clique"])
     self.elapsed_time = round(time.perf_counter() - start_time, 7)
     self.valid = not fcres["timeout"]
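
The manual node_to_int / int_to_node bookkeeping above can also be delegated to networkx's relabelling helper; a minimal sketch, assuming only that the original (u, v) tuple should survive as a node attribute:

import networkx as nx

lg = nx.line_graph(nx.path_graph(5))
lg_int = nx.convert_node_labels_to_integers(lg, first_label=0, label_attribute="edge")
int_to_node = nx.get_node_attributes(lg_int, "edge")   # e.g. {0: (0, 1), 1: (1, 2), ...}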
Code example #18
def get_qubit_registers_for_adder(qc: QuantumComputer, num_length: int,
                                  qubits: Optional[Sequence[int]] = None)\
        -> Tuple[Sequence[int], Sequence[int], int, int]:
    """
    Searches for a layout among the given qubits for the two n-bit registers and two additional
    ancilla that matches the simple layout given in figure 4 of [CDKM96]_.

    This method ignores any considerations of physical characteristics of the qc aside from the
    qubit layout. An error is thrown if the appropriate layout is not found.

    :param qc: the quantum resource on which an adder program will be executed.
    :param num_length: the length of the bitstring representation of one summand
    :param qubits: the available qubits on which to run the adder program.
    :return: the necessary registers and ancilla labels for implementing an adder
        program to add the numbers a and b. The output can be passed directly to :func:`adder`
    """
    if qubits is None:
        unavailable = []  # assume this means all qubits in qc are available
    else:
        unavailable = [qubit for qubit in qc.qubits() if qubit not in qubits]

    graph = qc.qubit_topology().copy()
    for qubit in unavailable:
        graph.remove_node(qubit)

    # networkx only provides subgraph isomorphism, but we want a subgraph monomorphism, i.e. we
    # specifically want to match the edges of desired_layout with some subgraph of graph. To
    # accomplish this, we swap the nodes and edges of graph by making a line graph.
    line_graph = nx.line_graph(graph)

    # We want a path of n nodes, which has n-1 edges. Since we are matching edges of graph with
    # nodes of layout we make a layout of n-1 nodes.
    num_desired_nodes = 2 * num_length + 2
    desired_layout = nx.path_graph(num_desired_nodes - 1)

    g_matcher = nx.algorithms.isomorphism.GraphMatcher(line_graph,
                                                       desired_layout)

    try:
        # pick out a subgraph isomorphic to the desired_layout if one exists
        # this is an isomorphic mapping from edges in graph (equivalently nodes of line_graph) to
        # nodes in desired_layout (equivalently edges of a path graph with one more node)
        edge_iso = next(g_matcher.subgraph_isomorphisms_iter())
    except StopIteration:  # next() raises StopIteration when no isomorphism is found
        raise Exception(
            "An appropriate layout for the qubits could not be found among the "
            "provided qubits.")

    # pick out the edges of the isomorphism from the original graph
    subgraph = nx.Graph(graph.edge_subgraph(edge_iso.keys()))

    # pick out an endpoint of our path to start the assignment
    start_node = -1
    for node in subgraph.nodes:
        if subgraph.degree(node) == 1:  # found an endpoint
            start_node = node
            break

    return assign_registers_to_line_or_cycle(start_node, subgraph, num_length)
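
The trick described in the comments — matching a path of n-1 nodes against the line graph to pin down a path of n qubits — can be sketched on its own. A hypothetical example on a small stand-in topology (the graph and sizes are illustrative, not the pyQuil API):

import networkx as nx
from networkx.algorithms import isomorphism

topo = nx.grid_2d_graph(2, 3)                # stand-in for qc.qubit_topology()
lg = nx.line_graph(topo)
desired_layout = nx.path_graph(3)            # 3 edges <-> a path of 4 qubits
matcher = isomorphism.GraphMatcher(lg, desired_layout)
edge_iso = next(matcher.subgraph_isomorphisms_iter())   # raises StopIteration if none exists
path_edges = list(edge_iso.keys())           # edges of topo forming the 4-qubit path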
Code example #19
def generateLabeledLineGraph(G):
	lineGraph=nx.line_graph(G)
	for vertexIndex in lineGraph:
		lineGraph.node[vertexIndex]['label']=(G.node[vertexIndex[0]]['label'],G.node[vertexIndex[1]]['label'])
	for n,nbrsdict in lineGraph.adjacency_iter():
		for nbr,eattr in nbrsdict.items():
			lineGraph.edge[n][nbr]['label']=G.node[findCommonNode(n,nbr)]['label']
	return lineGraph
Code example #20
def test(graph):
    if (fetch.is_line_graph(graph, G1, G2, G3, G4, G5, G6, G7, G8, G9)):
        line_graph = nx.line_graph(graph)
        euler = fetch.euler_cycle(line_graph)
        if (euler != False):
            return list(euler)
        return euler
    return (fetch.is_line_graph(graph, G1, G2, G3, G4, G5, G6, G7, G8, G9))
Code example #21
def seed_graph(M, connected=False):

    if connected: suffix = "c"
    else: suffix = "d1"

    for idx, seed in enumerate(
            nx.read_graph6(f"data/undirected/{M}{suffix}.g6")):
        yield nx.convert_node_labels_to_integers(nx.line_graph(seed))
Code example #22
    def edge_distances(self, e):
        """ Return vector of shortest path edge distances between e
        and all other edges
        """
        L = nx.line_graph(self.G)
        path_lens = nx.shortest_path_length(L, source=e)

        return np.array([path_lens[f] for f in self.G.edges_iter()])
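
G.edges_iter() is networkx 1.x; with the current API the same edge-to-edge distance vector can be computed as below. A minimal sketch on an illustrative graph (in current networkx, the line-graph nodes of an undirected graph are stored as sorted edge tuples, hence the tuple(sorted(...)) lookup):

import networkx as nx
import numpy as np

G = nx.cycle_graph(6)                        # illustrative graph
L = nx.line_graph(G)
e = (0, 1)                                   # an edge of G, i.e. a node of L
path_lens = nx.shortest_path_length(L, source=e)
dists = np.array([path_lens[tuple(sorted(f))] for f in G.edges()])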
Code example #23
	def generateLabeledLineGraph(G):
		lineGraph=nx.line_graph(G)
		for vertexIndex in lineGraph:
			lineGraph.node[vertexIndex]['label']=(G.node[vertexIndex[0]]['label'],G.node[vertexIndex[1]]['label'])
		for n,nbrsdict in lineGraph.adjacency_iter():
			for nbr,eattr in nbrsdict.items():
				lineGraph.edge[n][nbr]['label']=G.node[findCommonNode(n,nbr)]['label']
		return lineGraph
Code example #24
File: utils.py  Project: jnowak90/CytoSeg2.0
def centralize_graph(graph, epb='lgth', efb='capa', ndg='capa', nec='capa', npr='capa'):
    """Compute edge centralities.

    Parameters
    ----------
    graph : original graph
    epb : edge property used for computation of edge path betweenness
    efb : "                                          flow betweenness
    ndg : "                                          degree centrality
    nec : "                                          eigenvector centrality
    npr : "                                          page rank

    Returns
    -------
    graphCentralities : graph with computed edge centralities

    """
    graphCentralities = graph.copy()
    edges = graphCentralities.edges(data=True)
    edgeCapacity = 1.0 * np.array([property['capa'] for node1, node2, property in edges])
    edgeCapacity /= edgeCapacity.sum()
    edgeLength = 1.0 / edgeCapacity
    for index, (node1, node2, property) in enumerate(edges):
        property['capa'] = edgeCapacity[index]
        property['lgth'] = edgeLength[index]
    edgeBetweenCentrality = nx.edge_betweenness_centrality(graphCentralities, weight=epb)
    edgeFlowBetweennessCentrality = nx.edge_current_flow_betweenness_centrality(graphCentralities, weight=efb)
    lineGraph = nx.line_graph(graphCentralities)
    degree = graphCentralities.degree(weight=ndg)
    for node1, node2, property in lineGraph.edges(data=True):
        intersectingNodes = list(set(node1).intersection(node2))[0]
        property[ndg] = degree[intersectingNodes]
    eigenvectorCentrality = nx.eigenvector_centrality_numpy(lineGraph, weight=ndg)
    pageRank = nx.pagerank(lineGraph, weight=ndg)
    degreeCentrality = dict(lineGraph.degree(weight=ndg))
    for index, (node1, node2, property) in enumerate(edges):
        edge = (node1, node2)
        if (edge in edgeBetweenCentrality.keys()):
            property['epb'] = edgeBetweenCentrality[edge]
        else:
            property['epb'] = edgeBetweenCentrality[edge[::-1]]
        if (edge in edgeFlowBetweennessCentrality.keys()):
            property['efb'] = edgeFlowBetweennessCentrality[edge]
        else:
            property['efb'] = edgeFlowBetweennessCentrality[edge[::-1]]
        if (edge in degreeCentrality.keys()):
            property['ndg'] = degreeCentrality[edge]
        else:
            property['ndg'] = degreeCentrality[edge[::-1]]
        if (edge in eigenvectorCentrality.keys()):
            property['nec'] = eigenvectorCentrality[edge]
        else:
            property['nec'] = eigenvectorCentrality[edge[::-1]]
        if (edge in pageRank.keys()):
            property['npr'] = pageRank[edge]
        else:
            property['npr'] = pageRank[edge[::-1]]
    return(graphCentralities)
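
The core pattern above — compute node centralities on the line graph, then read them back as edge centralities of the original graph — can be sketched without the capa/lgth bookkeeping; a minimal example (the graph is illustrative, and the dict keys are edges of G):

import networkx as nx

G = nx.karate_club_graph()                   # illustrative graph
L = nx.line_graph(G)
edge_eigenvector = nx.eigenvector_centrality_numpy(L)   # {(u, v): centrality, ...}
edge_pagerank = nx.pagerank(L)
edge_degree = dict(L.degree())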
Code example #25
File: Export.py  Project: Natay/StableMotifs
def networkx_succession_diagram_motif_based(ar,
                                            include_attractors_in_diagram=True
                                            ):
    """Label the succesion diagram and (optionally) attractors of the input attractor
    repertoire according to the conventions of Zanudo and Albert (2015). Useful
    for plotting.

    Parameters
    ----------
    ar : AttractorRepertoire
        Attractor repertoire object for which to build the diagram.
    include_attractors_in_diagram : bool
        Whether attractors should be represented as nodes in the diagram (the
        default is True).

    Returns
    -------
    networkx.DiGraph
        A labeled digraph that represents the succession diagram.

    """
    G_reduced_network_based = networkx_succession_diagram_reduced_network_based(
        ar, include_attractors_in_diagram=False)
    G_motif_based = nx.line_graph(G_reduced_network_based)
    for i, j in G_motif_based.nodes():

        node_motif = set([
            frozenset(k.items()) for k in
            ar.succession_diagram.motif_reduction_dict[j].motif_history
        ]) - set([
            frozenset(k.items()) for k in
            ar.succession_diagram.motif_reduction_dict[i].motif_history
        ])
        node_label = format_reduction_label(str(dict(list(node_motif)[0])))
        G_motif_based.nodes[(i, j)]['label'] = node_label
        G_motif_based.nodes[(i,
                             j)]['virtual_nodes'] = dict(list(node_motif)[0])

    if include_attractors_in_diagram:
        for a_index, a in enumerate(ar.attractors):
            G_motif_based.add_node('A' + str(a_index))
            G_motif_based.nodes[
                'A' + str(a_index)]['label'] = format_reduction_label(
                    str(a.attractor_dict))
            G_motif_based.nodes[
                'A' + str(a_index)]['virtual_nodes'] = a.attractor_dict
            for r in a.reductions:
                r_key = list(
                    ar.succession_diagram.motif_reduction_dict.keys())[list(
                        ar.succession_diagram.motif_reduction_dict.values(
                        )).index(r)]
                for n in G_motif_based.nodes():
                    if type(n) == tuple:
                        i, j = n
                        if r_key == j:
                            G_motif_based.add_edge((i, j), 'A' + str(a_index))
    return G_motif_based
Code example #26
def build_line_graph(people):
    """
    Edge coloring and Vizing's theorem solution,
    which can be found in the Stack Overflow question below.
    ref: https://stackoverflow.com/questions/51758406/creating-time-schedule-from-list-of-people-and-who-they-have-to-meet
    """
    G = nx.Graph()
    G.add_edges_from(((p, q) for p, L in people for q in L))
    return nx.line_graph(G)
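
The point of building the line graph here is that an edge coloring of the meeting graph (one meeting per person per time slot) becomes a vertex coloring of L(G). A hypothetical continuation using networkx's greedy coloring heuristic, which may need more than the Δ(G)+1 colors guaranteed by Vizing's theorem:

import networkx as nx

people = [("alice", ["bob", "carol"]), ("bob", ["carol"])]   # illustrative input
schedule_graph = build_line_graph(people)
slots = nx.greedy_color(schedule_graph)      # e.g. {("alice", "bob"): 0, ...}: meeting -> time slot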
Code example #27
File: test_line.py  Project: NikitaVAP/pycdb
 def test_line(self):
     G=nx.star_graph(5)
     L=nx.line_graph(G)
     assert_true(nx.is_isomorphic(L,nx.complete_graph(5)))
     G=nx.path_graph(5)
     L=nx.line_graph(G)
     assert_true(nx.is_isomorphic(L,nx.path_graph(4)))
     G=nx.cycle_graph(5)
     L=nx.line_graph(G)
     assert_true(nx.is_isomorphic(L,G))
     G=nx.DiGraph()
     G.add_edges_from([(0,1),(0,2),(0,3)])
     L=nx.line_graph(G)
     assert_equal(L.adj, {})
     G=nx.DiGraph()
     G.add_edges_from([(0,1),(1,2),(2,3)])
     L=nx.line_graph(G)
     assert_equal(sorted(L.edges()), [((0, 1), (1, 2)), ((1, 2), (2, 3))])
Code example #28
 def genereate_line_graph(self):
     start_time = time.time()
     print("start generating line graph...")
     self.line_graph = nx.line_graph(self.graph)
     nx.write_gpickle(self.line_graph,
                      os.path.join("cache", "line_graph.gpickle"))
     if self.verbose:
         print("write line_graph in cache, using time",
               time.time() - start_time)
Code example #29
 def test_line(self):
     G = nx.star_graph(5)
     L = nx.line_graph(G)
     assert_true(nx.is_isomorphic(L, nx.complete_graph(5)))
     G = nx.path_graph(5)
     L = nx.line_graph(G)
     assert_true(nx.is_isomorphic(L, nx.path_graph(4)))
     G = nx.cycle_graph(5)
     L = nx.line_graph(G)
     assert_true(nx.is_isomorphic(L, G))
     G = nx.DiGraph()
     G.add_edges_from([(0, 1), (0, 2), (0, 3)])
     L = nx.line_graph(G)
     assert_equal(L.adj, {})
     G = nx.DiGraph()
     G.add_edges_from([(0, 1), (1, 2), (2, 3)])
     L = nx.line_graph(G)
     assert_equal(sorted(L.edges()), [((0, 1), (1, 2)), ((1, 2), (2, 3))])
Code example #30
File: data_loader.py  Project: moguizhizi/CS224W
def load_data_for_city(city, root_dir='.'):
    speeds_filename = 'movement-speeds-quarterly-by-hod-%s-2019-Q2.csv.zip' % city
    graph_filename = '%s.gpickle.gz' % city

    graph = nx.read_gpickle(os.path.join(root_dir, graph_filename))
    print('OSM MultiDiGraph has %d nodes, %d edges' %
        (nx.number_of_nodes(graph), nx.number_of_edges(graph)))
    graph = nx.Graph(graph)
    print('OSM Graph has %d nodes, %d edges' %
        (nx.number_of_nodes(graph), nx.number_of_edges(graph)))

    speeds = pd.read_csv(os.path.join(root_dir, speeds_filename))
    # Print basic stats on the data.
    speeds_num_rows = len(speeds)
    speeds_num_distinct_segment_ids = speeds['segment_id'].nunique()
    print('Speeds has %d rows, %d distinct segment IDs' %
        (speeds_num_rows, speeds_num_distinct_segment_ids))
    # Get p85 before dropping hours
    p85 = speeds.groupby('osm_way_id').mean()['speed_mph_p85']
    # Drop speeds with hour not in 7-10
    speeds_to_drop = (speeds['hour_of_day'] < 7) | (speeds['hour_of_day'] > 10)
    speeds.drop(speeds[speeds_to_drop].index, inplace=True)
    print('Dropped %d/%d Uber speeds with hour not in 7-10' % (speeds_to_drop.sum(), len(speeds_to_drop)))
    # For each OSM way ID, we'll just take the average of all the rows present
    speeds = speeds.groupby('osm_way_id').mean()[ ['speed_mph_mean', 'speed_mph_stddev']]
    speeds = speeds.join(p85)
    print('After processing, %d distinct OSM way IDs in the speeds dataset' % len(speeds))

    orig_graph = graph
    orig_speeds = speeds
    graph, speeds = merge_uber_osm_data(graph, speeds)
    print('After merging, graph has %d nodes, %d edges' %
        (nx.number_of_nodes(graph), nx.number_of_edges(graph)))

    # Do this after merging
    # Compute 1 - mean / p85 as the measure of traffic
    speeds['traffic'] = (1 - speeds['speed_mph_mean'] / speeds['speed_mph_p85']).clip(0.0, 1.0)
    # Group traffic into 5 classes like uber does
    speeds['traffic_class'] = speeds['traffic'].floordiv(0.2).astype('int')
    # Add traffic as edge attribute in the graph
    for v1, v2, edge in graph.edges(data=True):
        edge['traffic'] = speeds.loc[edge['osmid'], 'traffic']
        edge['traffic_class'] = speeds.loc[edge['osmid'], 'traffic_class']

    data = CityData(
        orig_speeds = orig_speeds,
        speeds = speeds,
        orig_graph = orig_graph,
        graph = graph,
        gmmc = nx.algorithms.community.greedy_modularity_communities(graph),
        line_graph = nx.line_graph(graph),
    )
    print('Greedy modularity maximization produced %d communities' % len(data.gmmc))
    print('Line graph has %d nodes, %d edges' %
        (nx.number_of_nodes(data.line_graph), nx.number_of_edges(data.line_graph)))
    return data
Code example #31
    def make_graph(self, edges, edge_costs):
        '''Make a graph object'''
        graph = nx.Graph()
        for edge in edges:
            vertex_u, vertex_v = (edge.start,
                                  edge.end) if edge.start < edge.end else (
                                      edge.end, edge.start)
            graph.add_edge(vertex_u, vertex_v, weight=edge.cost)

        # Graph which has edges as vertices of 'graph' and vertices as edges of 'graph'
        alt_graph = nx.line_graph(graph)

        # Maps alt-graph vertex index -> original edge (vertex pair)
        alt_vertix_index_to_original_edges = {}
        # Maps original edge (vertex pair) -> alt-graph vertex index
        original_edge_to_alt_vertex_index = {}
        for index, original_edge in enumerate(alt_graph.nodes()):
            # The vertex is an original edge and pair of original vertex
            vertex_u, vertex_v = original_edge
            edge = (vertex_u, vertex_v) if vertex_u < vertex_v else (vertex_v,
                                                                     vertex_u)
            alt_vertix_index_to_original_edges[index] = edge
            original_edge_to_alt_vertex_index[(vertex_u, vertex_v)] = index
            original_edge_to_alt_vertex_index[(vertex_v, vertex_u)] = index

        alt_edges = {}
        max_weight = 0
        for index, alt_edge in enumerate(alt_graph.edges()):
            edge0_index = original_edge_to_alt_vertex_index[alt_edge[0]]
            edge1_index = original_edge_to_alt_vertex_index[alt_edge[1]]
            weight = (edge_costs[alt_edge[0]] + edge_costs[alt_edge[1]]) / 2.0
            alt_edges[(edge0_index, edge1_index)] = weight
            alt_edges[(edge1_index, edge0_index)] = weight
            max_weight = max(weight, max_weight)

        size = len(alt_graph.nodes())
        penalty = size * max_weight * 3
        edge_distances = []
        for from_index in range(0, size):
            row = []
            for to_index in range(0, size):
                edge0 = (from_index, to_index)
                edge1 = (to_index, from_index)
                if from_index == to_index:
                    weight = 0.0
                elif edge0 in alt_edges:
                    weight = alt_edges[edge0]
                elif edge1 in alt_edges:
                    weight = alt_edges[edge1]
                else:
                    weight = penalty
                row.append(weight)
            edge_distances.append(row)

        return graph, alt_vertix_index_to_original_edges, CreateDistanceCallback(
            edge_distances)
Code example #32
File: observable.py  Project: asrvsn/graph-turbulence
 def __init__(self, *args, **kwargs):
     super().__init__(*args, **kwargs)
     # Weighted dual graph definition taken from Eq. 6, https://arxiv.org/pdf/0912.4389.pdf
     self.vertex_dual = nx.line_graph(self.G)
     off_diag = 1 - np.eye(len(self))
     inc = nx.incidence_matrix(self.G,
                               oriented=True)  # TODO: extract weights
     inv_deg = np.diag([(0. if self.G.degree[x] <= 1 else
                         (1 / self.G.degree[x])) for x in self.G.nodes()])
     self.vertex_dual_adj = inc.T @ inv_deg @ inc @ off_diag
Code example #33
File: SGNs.py  Project: mingshuonie/SGNs-master
def to_line(graph):
    '''
    :param graph: input graph
    :return graph_line: line graph / subgraph network with integer node labels
    '''
    graph_to_line = nx.line_graph(graph)
    graph_line = nx.convert_node_labels_to_integers(graph_to_line,
                                                    first_label=0,
                                                    ordering='default')
    return graph_line
Code example #34
def build_line_graph(people):
    """
    Edge coloring and Vizing's theorem solution,
    which can be found in the Stack Overflow question below.

    ref: https://stackoverflow.com/questions/51758406/creating-time-schedule-from-list-of-people-and-who-they-have-to-meet
    """
    G = nx.Graph()
    G.add_edges_from(((p, q) for p, L in people for q in L))
    return nx.line_graph(G)
Code example #35
    def cycle_distances(self, e):
        """ Return vector of shortest path cycle distances between
        e and all other edges
        """
        L = nx.line_graph(self.cycle_dual)

        # find all nodes of the line graph that e belongs to

        line_nodes = [list(combinations(self.edge_in_cycles[f], 2)) for f in self.G.edges_iter()]

        e_ind = self.G.edges().index(e)
        dists_from_all_sources = [nx.shortest_path_length(L, source=src) for src in line_nodes[e_ind]]

        cycle_dists = np.array(
            [
                [min([d[h] for h in line_nodes[i]]) for i in xrange(self.G.number_of_edges())]
                for d in dists_from_all_sources
            ]
        ).min(axis=0)

        return cycle_dists
Code example #36
File: analyzer.py  Project: hronellenfitsch/nesting
def weighted_line_graph(G, average=False):
    """ Return a line graph of G where edge attributes are propagated
    properly. Node attributes are ignored.
    If average is set to True, perform an averaging over
    conductivities.
    """
    line_graph = nx.line_graph(G)
    line_graph.add_nodes_from((tuple(sorted((u, v))), d)
            for u, v, d in G.edges_iter(data=True))

    # average
    if average:
        new_node_conds = {}
        for n, d in line_graph.nodes_iter(data=True):
            neighbor_conds = mean([line_graph.node[m]['conductivity'] 
                    for m in line_graph.neighbors(n)])
            new_node_conds[n] = 0.5*(d['conductivity'] + 
                    neighbor_conds)

        for n, v in new_node_conds.iteritems():
            line_graph.node[n]['conductivity'] = v

    return line_graph
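
edges_iter, nodes_iter, .node and iteritems above are networkx 1.x / Python 2 idioms; against the networkx 2.x/3.x API the same attribute propagation might look like the following sketch:

import networkx as nx
from statistics import mean

def weighted_line_graph_v2(G, average=False):
    """Line graph of G with edge attributes copied onto the line-graph nodes."""
    L = nx.line_graph(G)
    L.add_nodes_from((tuple(sorted((u, v))), d) for u, v, d in G.edges(data=True))
    if average:
        new_conds = {
            n: 0.5 * (d["conductivity"]
                      + mean(L.nodes[m]["conductivity"] for m in L.neighbors(n)))
            for n, d in L.nodes(data=True)
        }
        nx.set_node_attributes(L, new_conds, "conductivity")
    return L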
Code example #37
File: demo.plot.py  Project: gree2/hobby
def demo_line():
    """demo_line"""
    g = nx.star_graph(10)
    l = nx.line_graph(g)
    plot(g)
Code example #38
 def test_line_inverse_line_dgm(self):
     G = nx.dorogovtsev_goltsev_mendes_graph(4)
     H = nx.line_graph(G)
     J = nx.inverse_line_graph(H)
     assert_true(nx.is_isomorphic(G, J))
Code example #39
File: test_line.py  Project: aparamon/networkx
 def test_create2(self):
     G = nx.Graph()
     G.add_edges_from([(0, 1), (1, 2), (2, 3)])
     L = nx.line_graph(G, create_using=nx.DiGraph())
     assert_edges_equal(L.edges(), [((0, 1), (1, 2)), ((1, 2), (2, 3))])
Code example #40
File: test_line.py  Project: aparamon/networkx
 def test_digraph2(self):
     G = nx.DiGraph()
     G.add_edges_from([(0, 1), (1, 2), (2, 3)])
     L = nx.line_graph(G)
     assert_edges_equal(L.edges(), [((0, 1), (1, 2)), ((1, 2), (2, 3))])
Code example #41
File: test_line.py  Project: aparamon/networkx
 def test_digraph1(self):
     G = nx.DiGraph()
     G.add_edges_from([(0, 1), (0, 2), (0, 3)])
     L = nx.line_graph(G)
     # no edge graph, but with nodes
     assert_equal(L.adj, {(0, 1): {}, (0, 2): {}, (0, 3): {}})
Code example #42
File: test_line.py  Project: aparamon/networkx
 def test_cycle(self):
     G = nx.cycle_graph(5)
     L = nx.line_graph(G)
     assert_true(nx.is_isomorphic(L, G))
Code example #43
G19.add_node(1,label="B")
G19.add_node(2,label="A")
G19.add_node(3,label="B")
G19.add_node(4,label="A")
G19.add_edge(1,2)
G19.add_edge(2,3)
G19.add_edge(3,4)
G19.add_edge(4,1)

generateIndependentEmbeddings(G10,G11)
generateIndependentEmbeddings(G14,G15)
generateIndependentEmbeddings(G18,G19)
print checkSubGraphIsomorphismWithLabels(G16,G17)
generateIndependentEmbeddings(G16,G17)

print nx.line_graph(G16).nodes()
print "20-21"
G20=nx.Graph()
G20.add_node(1,label="A")
G20.add_node(2,label="A")
G20.add_edge(1,2)

G21=nx.Graph()
G21.add_node(1,label="B")
G21.add_node(2,label="B")
G21.add_node(3,label="B")
G21.add_edge(1,2)
G21.add_edge(2,3)

checkSubGraphIsomorphismWithLabels(G20,G21)
print generateIndependentEmbeddings(G16,G17)
Code example #44
File: test_line.py  Project: aparamon/networkx
 def test_star(self):
     G = nx.star_graph(5)
     L = nx.line_graph(G)
     assert_true(nx.is_isomorphic(L, nx.complete_graph(5)))
Code example #45
 def test_line_inverse_line_complete(self):
     G = nx.complete_graph(10)
     H = nx.line_graph(G)
     J = nx.inverse_line_graph(H)
     assert_true(nx.is_isomorphic(G, J))
Code example #46
def generate_line_graph(edge_labels,graph):
    g2=nx.DiGraph()
    ln_graph=nx.line_graph(graph)
    for edge in ln_graph.edges():
        g2.add_edge(edge_labels[edge[0]],edge_labels[edge[1]])
    return g2
Code example #47
 def test_line_inverse_line_path(self):
     G = nx.path_graph(10)
     H = nx.line_graph(G)
     J = nx.inverse_line_graph(H)
     assert_true(nx.is_isomorphic(G, J))
Code example #48
 def test_line_inverse_line_hypercube(self):
     G = nx.hypercube_graph(5)
     H = nx.line_graph(G)
     J = nx.inverse_line_graph(H)
     assert_true(nx.is_isomorphic(G, J))
Code example #49
File: test_line.py  Project: aparamon/networkx
 def test_path(self):
     G = nx.path_graph(5)
     L = nx.line_graph(G)
     assert_true(nx.is_isomorphic(L, nx.path_graph(4)))
Code example #50
 def test_line_inverse_line_star(self):
     G = nx.star_graph(20)
     H = nx.line_graph(G)
     J = nx.inverse_line_graph(H)
     assert_true(nx.is_isomorphic(G, J))
Code example #51
 def test_line_inverse_line_multipartite(self):
     G = nx.complete_multipartite_graph(3, 4, 5)
     H = nx.line_graph(G)
     J = nx.inverse_line_graph(H)
     assert_true(nx.is_isomorphic(G, J))