def test_reverse_cuthill_mckee():
    # example graph from
    # http://www.boost.org/doc/libs/1_37_0/libs/graph/example/cuthill_mckee_ordering.cpp
    G = nx.Graph([(0, 3), (0, 5), (1, 2), (1, 4), (1, 6), (1, 9), (2, 3),
                  (2, 4), (3, 5), (3, 8), (4, 6), (5, 6), (5, 7), (6, 7)])
    rcm = list(reverse_cuthill_mckee_ordering(G, start=0))
    assert_equal(rcm, [9, 1, 4, 6, 7, 2, 8, 5, 3, 0])
    rcm = list(reverse_cuthill_mckee_ordering(G))
    assert_equal(rcm, [0, 8, 5, 7, 3, 6, 4, 2, 1, 9])
Example #2
def test_reverse_cuthill_mckee():
    # example graph from
    # http://www.boost.org/doc/libs/1_37_0/libs/graph/example/cuthill_mckee_ordering.cpp
    G = nx.Graph([(0, 3), (0, 5), (1, 2), (1, 4), (1, 6), (1, 9), (2, 3),
                  (2, 4), (3, 5), (3, 8), (4, 6), (5, 6), (5, 7), (6, 7)])
    rcm = list(reverse_cuthill_mckee_ordering(G, start=0))
    assert_equal(rcm, [9, 1, 4, 6, 7, 2, 8, 5, 3, 0])
    rcm = list(reverse_cuthill_mckee_ordering(G))
    assert_equal(rcm, [0, 8, 5, 7, 3, 6, 4, 2, 1, 9])
def blockdiag(mat):
    """
    Bandwidth reduction problem
    http://ciprian-zavoianu.blogspot.com/2009/01/project-bandwidth-reduction.html
    Reorders the matrix with a reverse Cuthill-McKee ordering, which yields a
    banded (near block-diagonal) form.
    """
    mat = csr_matrix(mat)
    graph = nx.from_scipy_sparse_matrix(mat)
    rcm = list(reverse_cuthill_mckee_ordering(graph))
    blockd = nx.to_scipy_sparse_matrix(graph, nodelist=rcm,
                                       format='csr').toarray()
    print(rcm)
    return blockd
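A minimal usage sketch for blockdiag (illustrative, not part of the original snippet); it spells out the imports the function relies on and assumes a NetworkX version that still provides from_scipy_sparse_matrix, as the snippet itself does. The small symmetric matrix is arbitrary.

# Hedged usage sketch; assumes the blockdiag() definition above.
import numpy as np
import networkx as nx
from scipy.sparse import csr_matrix  # used inside blockdiag
from networkx.utils import reverse_cuthill_mckee_ordering

# small arbitrary symmetric sparsity pattern, chosen only for illustration
mat = np.array([[2, 1, 0, 0, 1],
                [1, 2, 0, 0, 0],
                [0, 0, 2, 1, 0],
                [0, 0, 1, 2, 0],
                [1, 0, 0, 0, 2]])
print(blockdiag(mat))  # dense array of the RCM-reordered matrix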
Example #4
def test_result(G, opt_seq, idx_file):
    """
    Comparison between heuristic and exact methods
    larger  = list of cases where the exact bandwidth came out larger than the
              heuristic one; entries have the form [idx, exact_band, heuristic_band]
    same    = number of cases where the exact result equalled the heuristic one
    smaller = number of cases where the exact result was smaller than the heuristic one
    """
    larger = list()
    same = 0
    smaller = 0

    ########################################################################
    # NOTE: Graph[0] in idx_file==0 has no edges, so it is not considered
    start = 1 if idx_file == 0 else 0
    
    # main runs a for loop; each iteration takes a block of graphs and runs the
    # tests for every graph in that block. In the first block, index 0 has no
    # edges, so it is skipped. From the following graph blocks onwards
    # (1.txt, 2.txt, ...) that problem no longer occurs and iteration starts at 0.
    for i in range(start, len(G)):
        rcm = list(reverse_cuthill_mckee_ordering(G[i]))
        heuristic_band = get_bandwidth('RCM', G[i], nodelist=rcm)
        cp_band = get_bandwidth('CP', G[i], nodelist=opt_seq[i])

        if cp_band < heuristic_band:
            smaller += 1
        elif cp_band == heuristic_band:
            same += 1
        else:
            larger.append([i, cp_band, heuristic_band])

    return larger, same, smaller
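The get_bandwidth helper is not shown here; a hypothetical definition, consistent with how it is called in test_result above (the first argument is kept only as a label), might look like this.

# Hypothetical helper, not from the original source: bandwidth of G under the
# node ordering given by nodelist.
def get_bandwidth(label, G, nodelist):
    pos = {node: i for i, node in enumerate(nodelist)}  # rank of each node
    return max((abs(pos[u] - pos[v]) for u, v in G.edges()), default=0)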
Example #5
def test_rcm_alternate_heuristic():
    # example from
    G = nx.Graph([(0, 0),
                  (0, 4),
                  (1, 1),
                  (1, 2),
                  (1, 5),
                  (1, 7),
                  (2, 2),
                  (2, 4),
                  (3, 3),
                  (3, 6),
                  (4, 4),
                  (5, 5),
                  (5, 7),
                  (6, 6),
                  (7, 7)])

    answers = [[6, 3, 5, 7, 1, 2, 4, 0], [6, 3, 7, 5, 1, 2, 4, 0],
               [7, 5, 1, 2, 4, 0, 6, 3]]

    def smallest_degree(G):
        deg, node = min((d, n) for n, d in G.degree())
        return node
    rcm = list(reverse_cuthill_mckee_ordering(G, heuristic=smallest_degree))
    assert_true(rcm in answers)
Example #6
def test_rcm_alternate_heuristic():
    # example from
    G = nx.Graph([(0, 0),
                  (0, 4),
                  (1, 1),
                  (1, 2),
                  (1, 5),
                  (1, 7),
                  (2, 2),
                  (2, 4),
                  (3, 3),
                  (3, 6),
                  (4, 4),
                  (5, 5),
                  (5, 7),
                  (6, 6),
                  (7, 7)])

    answers = [[6, 3, 5, 7, 1, 2, 4, 0], [6, 3, 7, 5, 1, 2, 4, 0],
               [7, 5, 1, 2, 4, 0, 6, 3]]

    def smallest_degree(G):
        deg, node = min((d, n) for n, d in G.degree())
        return node
    rcm = list(reverse_cuthill_mckee_ordering(G, heuristic=smallest_degree))
    assert rcm in answers
Example #7
def test_rcm_alternate_heuristic():
    # example from
    G = nx.Graph([(0, 0),
                  (0, 4),
                  (1, 1),
                  (1, 2),
                  (1, 5),
                  (1, 7),
                  (2, 2),
                  (2, 4),
                  (3, 3),
                  (3, 6),
                  (4, 4),
                  (5, 5),
                  (5, 7),
                  (6, 6),
                  (7, 7)])

    answers = [[6, 3, 5, 7, 1, 2, 4, 0], [6, 3, 7, 5, 1, 2, 4, 0]]

    def smallest_degree(G):
        node, deg = min(G.degree().items(), key=lambda x: x[1])
        return node
    rcm = list(reverse_cuthill_mckee_ordering(G, heuristic=smallest_degree))
    assert_true(rcm in answers)
Example #8
def test_reverse_cuthill_mckee():
    # example graph from
    # http://www.boost.org/doc/libs/1_37_0/libs/graph/example/cuthill_mckee_ordering.cpp
    G = nx.Graph([(0, 3), (0, 5), (1, 2), (1, 4), (1, 6), (1, 9), (2, 3),
                  (2, 4), (3, 5), (3, 8), (4, 6), (5, 6), (5, 7), (6, 7)])
    rcm = list(reverse_cuthill_mckee_ordering(G))
    assert_true(rcm in [[0, 8, 5, 7, 3, 6, 2, 4, 1, 9],
                        [0, 8, 5, 7, 3, 6, 4, 2, 1, 9]])
Example #9
def _rcm_estimate(G, nodelist):
    """Estimate the Fiedler vector using the reverse Cuthill-McKee ordering."""
    G = G.subgraph(nodelist)
    order = reverse_cuthill_mckee_ordering(G)
    n = len(nodelist)
    index = dict(zip(nodelist, range(n)))
    x = ndarray(n, dtype=float)
    for i, u in enumerate(order):
        x[index[u]] = i
    x -= (n - 1) / 2.0
    return x
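A small sketch of how _rcm_estimate might be exercised (an illustration, with the imports the helper assumes): on a path graph the RCM-based estimate is monotone along the path, matching the shape of the true Fiedler vector.

# Hedged usage sketch; assumes the _rcm_estimate() definition above.
import networkx as nx
from numpy import ndarray  # used inside _rcm_estimate
from networkx.utils import reverse_cuthill_mckee_ordering

G = nx.path_graph(6)
x = _rcm_estimate(G, list(G))
print(x)  # centred ranks, values between -2.5 and 2.5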
def current_flow_betweenness_centrality_subset(G, sources, targets,
                                               normalized=True,
                                               weight=None,
                                               dtype=float, solver='lu'):
    r"""Compute current-flow betweenness centrality for subsets of nodes.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    sources: list of nodes
      Nodes to use as sources for current

    targets: list of nodes
      Nodes to use as sinks for current

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by b=b/(n-1)(n-2) where
      n is the number of nodes in G.

    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.

    dtype: data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver: string (default='lu')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    approximate_current_flow_betweenness_centrality
    betweenness_centrality
    edge_betweenness_centrality
    edge_current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the
    inverse Laplacian.  For a full matrix this is $O(n^3)$ but using
    sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
    Laplacian matrix condition number.

    The space required is $O(nw)$ where $w$ is the width of the sparse
    Laplacian matrix.  Worst case is $w=n$ for $O(n^2)$.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    from networkx.utils import reverse_cuthill_mckee_ordering
    try:
        import numpy as np
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires NumPy ',
                          'http://scipy.org/')
    try:
        import scipy
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires SciPy ',
                          'http://scipy.org/')
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    mapping = dict(zip(ordering, range(n)))
    H = nx.relabel_nodes(G, mapping)
    betweenness = dict.fromkeys(H, 0.0)  # b[v]=0 for v in H
    for row, (s, t) in flow_matrix_row(H, weight=weight, dtype=dtype,
                                       solver=solver):
        for ss in sources:
            i = mapping[ss]
            for tt in targets:
                j = mapping[tt]
                betweenness[s] += 0.5 * np.abs(row[i] - row[j])
                betweenness[t] += 0.5 * np.abs(row[i] - row[j])
    if normalized:
        nb = (n - 1.0) * (n - 2.0)  # normalization factor
    else:
        nb = 2.0
    for v in H:
        betweenness[v] = betweenness[v] / nb + 1.0 / (2 - n)
    return dict((ordering[k], v) for k, v in betweenness.items())
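A brief usage sketch through the public NetworkX API (illustrative only): restrict current flow to a single source/target pair on a small cycle.

# Hedged usage sketch via the public NetworkX API.
import networkx as nx

G = nx.cycle_graph(4)
bc = nx.current_flow_betweenness_centrality_subset(G, sources=[0], targets=[3])
print(bc)  # dict: node -> betweenness restricted to the (0, 3) pair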
Example #12
"""
Cuthill-McKee ordering of matrices

The reverse Cuthill-McKee algorithm gives a sparse matrix ordering that
reduces the matrix bandwidth.
"""

# Copyright (C) 2011-2019 by
# Author:    Aric Hagberg <*****@*****.**>
# BSD License
import networkx as nx
from networkx.utils import reverse_cuthill_mckee_ordering
import numpy as np

# build low-bandwidth numpy matrix
G = nx.grid_2d_graph(3, 3)
rcm = list(reverse_cuthill_mckee_ordering(G))
print("ordering", rcm)

print("unordered Laplacian matrix")
A = nx.laplacian_matrix(G)
x, y = np.nonzero(A)
#print("lower bandwidth:",(y-x).max())
#print("upper bandwidth:",(x-y).max())
print("bandwidth: %d" % ((y - x).max() + (x - y).max() + 1))
print(A)

B = nx.laplacian_matrix(G, nodelist=rcm)
print("low-bandwidth Laplacian matrix")
x, y = np.nonzero(B)
#print("lower bandwidth:",(y-x).max())
#print("upper bandwidth:",(x-y).max())
print("bandwidth: %d" % ((y - x).max() + (x - y).max() + 1))
print(B)
Example #13
def current_flow_closeness_centrality(G, normalized=True, weight='weight', 
                                      dtype=float, solver='lu'):
    """Compute current-flow closeness centrality for nodes.

    A variant of closeness centrality based on effective
    resistance between nodes in a network.  This metric
    is also known as information centrality.

    Parameters
    ----------
    G : graph
      A NetworkX graph 

    normalized : bool, optional
      If True the values are normalized by 1/(n-1) where n is the 
      number of nodes in G.

    dtype: data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver: string (default='lu')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and 
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with current flow closeness centrality as the value.
        
    See Also
    --------
    closeness_centrality

    Notes
    -----
    The algorithm is from Brandes [1]_.

    See also [2]_ for the original definition of information centrality.

    References
    ----------
    .. [1] Ulrik Brandes and Daniel Fleischer,
       Centrality Measures Based on Current Flow. 
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). 
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005. 
       http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf

    .. [2] Stephenson, K. and Zelen, M.
       Rethinking centrality: Methods and examples.
       Social Networks. Volume 11, Issue 1, March 1989, pp. 1-37
       http://dx.doi.org/10.1016/0378-8733(89)90016-6
    """
    from networkx.utils import reverse_cuthill_mckee_ordering 
    try:
        import numpy as np
    except ImportError:
        raise ImportError('current_flow_closeness_centrality requires NumPy ',
                          'http://scipy.org/')
    try:
        import scipy 
    except ImportError:
        raise ImportError('current_flow_closeness_centrality requires SciPy ',
                          'http://scipy.org/')
    if G.is_directed():
        raise nx.NetworkXError(
            "current_flow_closeness_centrality() not defined for digraphs.")
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    solvername = {"full": FullInverseLaplacian,
                  "lu": SuperLUInverseLaplacian,
                  "cg": CGInverseLaplacian}
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    betweenness = dict.fromkeys(H, 0.0)  # b[v]=0 for v in H
    n = G.number_of_nodes()
    L = laplacian_sparse_matrix(H, nodelist=range(n), weight=weight,
                                dtype=dtype, format='csc')
    C2 = solvername[solver](L, width=1, dtype=dtype)  # initialize solver
    for v in H:
        col = C2.get_row(v)
        for w in H:
            betweenness[v] += col[v] - 2 * col[w]
            betweenness[w] += col[v]

    if normalized:
        nb = len(betweenness) - 1.0
    else:
        nb = 1.0
    for v in H:
        betweenness[v] = nb / betweenness[v]
    return dict((ordering[k], float(v)) for k, v in betweenness.items())
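A short usage sketch via the public NetworkX API (illustrative only; requires NumPy and SciPy).

# Hedged usage sketch via the public NetworkX API.
import networkx as nx

G = nx.grid_2d_graph(3, 3)
cc = nx.current_flow_closeness_centrality(G)
print(sorted(cc.items(), key=lambda kv: -kv[1])[:3])  # three most central nodes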
def approximate_current_flow_betweenness_centrality(G, normalized=True,
                                                    weight='weight',
                                                    dtype=float, solver='full',
                                                    epsilon=0.5, kmax=10000):
    r"""Compute the approximate current-flow betweenness centrality for nodes.

    Approximates the current-flow betweenness centrality within absolute
    error of epsilon with high probability [1]_.


    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.

    weight : string or None, optional (default='weight')
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.

    dtype: data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver: string (default='full')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    epsilon: float
        Absolute error tolerance.

    kmax: int
       Maximum number of sample node pairs to use for approximation.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    current_flow_betweenness_centrality

    Notes
    -----
    The running time is `O((1/\epsilon^2)m{\sqrt k} \log n)`
    and the space required is `O(m)` for n nodes and m edges.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Ulrik Brandes and Daniel Fleischer:
       Centrality Measures Based on Current Flow.
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf
    """
    from networkx.utils import reverse_cuthill_mckee_ordering
    try:
        import numpy as np
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires NumPy ',
                          'http://scipy.org/')
    try:
        from scipy import sparse
        from scipy.sparse import linalg
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires SciPy ',
                          'http://scipy.org/')
    if G.is_directed():
        raise nx.NetworkXError('current_flow_betweenness_centrality() ',
                               'not defined for digraphs.')
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    solvername = {"full": FullInverseLaplacian,
                  "lu": SuperLUInverseLaplacian,
                  "cg": CGInverseLaplacian}
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    L = laplacian_sparse_matrix(H, nodelist=range(n), weight=weight,
                                dtype=dtype, format='csc')
    C = solvername[solver](L, dtype=dtype)  # initialize solver
    betweenness = dict.fromkeys(H, 0.0)
    nb = (n - 1.0) * (n - 2.0)  # normalization factor
    cstar = n * (n - 1) / nb
    l = 1  # parameter in approximation, adjustable
    k = l * int(np.ceil((cstar / epsilon) ** 2 * np.log(n)))
    if k > kmax:
        raise nx.NetworkXError('Number random pairs k>kmax (%d>%d) ' % (k, kmax),
                               'Increase kmax or epsilon')
    cstar2k = cstar / (2 * k)
    for i in range(k):
        s, t = random.sample(range(n), 2)
        b = np.zeros(n, dtype=dtype)
        b[s] = 1
        b[t] = -1
        p = C.solve(b)
        for v in H:
            if v == s or v == t:
                continue
            for nbr in H[v]:
                w = H[v][nbr].get(weight, 1.0)
                betweenness[v] += w * np.abs(p[v] - p[nbr]) * cstar2k
    if normalized:
        factor = 1.0
    else:
        factor = nb / 2.0
    # remap to original node names and "unnormalize" if required
    return dict((ordering[k], float(v * factor)) for k, v in betweenness.items())
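A usage sketch of the approximate variant next to the exact one (illustrative only; with epsilon=0.5 the number of sampled pairs for a graph this small stays well below kmax).

# Hedged usage sketch via the public NetworkX API.
import networkx as nx

G = nx.karate_club_graph()
exact = nx.current_flow_betweenness_centrality(G)
approx = nx.approximate_current_flow_betweenness_centrality(G, epsilon=0.5)
node = max(exact, key=exact.get)
print(node, round(exact[node], 3), round(approx[node], 3))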
Example #15
def get_bandwidth_nodelist_adjacency_rcm(Graph):
    rcm = list(reverse_cuthill_mckee_ordering(Graph))
    A = nx.adjacency_matrix(Graph, nodelist=rcm)
    L = nx.laplacian_matrix(nx.Graph(A))
    x, y = np.nonzero(L)
    return (x-y).max()
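A quick check of the helper above (illustrative), reusing the Boost example graph from the tests at the top of this page; the RCM half-bandwidth comes out smaller than the half-bandwidth of the original node order.

# Hedged usage sketch; assumes the helper above plus these imports.
import numpy as np
import networkx as nx
from networkx.utils import reverse_cuthill_mckee_ordering

G = nx.Graph([(0, 3), (0, 5), (1, 2), (1, 4), (1, 6), (1, 9), (2, 3),
              (2, 4), (3, 5), (3, 8), (4, 6), (5, 6), (5, 7), (6, 7)])
x, y = np.nonzero(nx.laplacian_matrix(G))
print("original half-bandwidth:", (x - y).max())
print("RCM half-bandwidth:", get_bandwidth_nodelist_adjacency_rcm(G))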
Example #16
def rcm(matrix):
    """Returns a reverse Cuthill-McKee reordering of the given matrix."""
    G = nx.from_scipy_sparse_matrix(matrix)
    rcm = reverse_cuthill_mckee_ordering(G)
    return nx.to_scipy_sparse_matrix(G, nodelist=list(rcm), format='csr')
Example #17
def rcm_min_degree(matrix):
    """Returns a reverse Cuthill-McKee reordering of the given matrix,
    using the minimum degree heuristic."""
    G = nx.from_scipy_sparse_matrix(matrix)
    rcm = reverse_cuthill_mckee_ordering(G, heuristic=min_degree_heuristic)
    return nx.to_scipy_sparse_matrix(G, nodelist=list(rcm), format='csr')
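min_degree_heuristic is not defined in this snippet; a hypothetical definition, consistent with the heuristic callback used in the tests above, could be:

# Hypothetical helper (not from the original source): return a node of minimum
# degree, as expected by reverse_cuthill_mckee_ordering(heuristic=...).
def min_degree_heuristic(G):
    return min(G, key=G.degree)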
Example #18
def edge_current_flow_betweenness_centrality(G, normalized=True, 
                                             weight='weight',
                                             dtype=float, solver='lu'):
    """Compute current-flow betweenness centrality for edges.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph 

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by b=b/(n-1)(n-2) where
      n is the number of nodes in G.

    weight : string or None, optional (default='weight')
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.

    dtype: data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver: string (default='lu')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and 
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of edge tuples with betweenness centrality as the value.
        
    See Also
    --------
    betweenness_centrality
    edge_betweenness_centrality
    current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in `O(I(n-1)+mn \log n)`
    time [1]_, where `I(n-1)` is the time needed to compute the 
    inverse Laplacian.  For a full matrix this is `O(n^3)` but using
    sparse methods you can achieve `O(nm{\sqrt k})` where `k` is the
    Laplacian matrix condition number.  

    The space required is `O(nw)` where `w` is the width of the sparse
    Laplacian matrix.  Worst case is `w=n` for `O(n^2)`.

    If the edges have a 'weight' attribute they will be used as 
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow. 
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05). 
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005. 
       http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf

    .. [2] A measure of betweenness centrality based on random walks, 
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    from networkx.utils import reverse_cuthill_mckee_ordering 
    try:
        import numpy as np
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires NumPy ',
                          'http://scipy.org/')
    try:
        import scipy 
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires SciPy ',
                          'http://scipy.org/')
    if G.is_directed():
        raise nx.NetworkXError('edge_current_flow_betweenness_centrality ',
                               'not defined for digraphs.')
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    betweenness = dict.fromkeys(H.edges(), 0.0)
    if normalized:
        nb = (n - 1.0) * (n - 2.0)  # normalization factor
    else:
        nb = 2.0
    for row, e in flow_matrix_row(H, weight=weight, dtype=dtype,
                                  solver=solver):
        pos = dict(zip(row.argsort()[::-1], range(1, n + 1)))
        for i in range(n):
            betweenness[e] += (i + 1 - pos[i]) * row[i]
            betweenness[e] += (n - i - pos[i]) * row[i]
        betweenness[e] /= nb
    return dict(((ordering[s], ordering[t]), float(v))
                for (s, t), v in betweenness.items())
Example #21
def current_flow_closeness_centrality(G,
                                      weight=None,
                                      dtype=float,
                                      solver="lu"):
    """Compute current-flow closeness centrality for nodes.

    Current-flow closeness centrality is a variant of closeness
    centrality based on effective resistance between nodes in
    a network. This metric is also known as information centrality.

    Parameters
    ----------
    G : graph
      A NetworkX graph.

    weight : None or string, optional (default=None)
      If None, all edge weights are considered equal.
      Otherwise holds the name of the edge attribute used as weight.
      The weight reflects the capacity or the strength of the
      edge.

    dtype: data type (default=float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver: string (default='lu')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with current flow closeness centrality as the value.

    See Also
    --------
    closeness_centrality

    Notes
    -----
    The algorithm is from Brandes [1]_.

    See also [2]_ for the original definition of information centrality.

    References
    ----------
    .. [1] Ulrik Brandes and Daniel Fleischer,
       Centrality Measures Based on Current Flow.
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf

    .. [2] Karen Stephenson and Marvin Zelen:
       Rethinking centrality: Methods and examples.
       Social Networks 11(1):1-37, 1989.
       https://doi.org/10.1016/0378-8733(89)90016-6
    """
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    solvername = {
        "full": FullInverseLaplacian,
        "lu": SuperLUInverseLaplacian,
        "cg": CGInverseLaplacian,
    }
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    betweenness = dict.fromkeys(H, 0.0)  # b[v]=0 for v in H
    n = H.number_of_nodes()
    L = laplacian_sparse_matrix(H,
                                nodelist=range(n),
                                weight=weight,
                                dtype=dtype,
                                format="csc")
    C2 = solvername[solver](L, width=1, dtype=dtype)  # initialize solver
    for v in H:
        col = C2.get_row(v)
        for w in H:
            betweenness[v] += col[v] - 2 * col[w]
            betweenness[w] += col[v]
    for v in H:
        betweenness[v] = 1.0 / (betweenness[v])
    return {ordering[k]: float(v) for k, v in betweenness.items()}
Example #22
def approximate_current_flow_betweenness_centrality(G,
                                                    normalized=True,
                                                    weight=None,
                                                    dtype=float,
                                                    solver='full',
                                                    epsilon=0.5,
                                                    kmax=10000,
                                                    seed=None):
    r"""Compute the approximate current-flow betweenness centrality for nodes.

    Approximates the current-flow betweenness centrality within absolute
    error of epsilon with high probability [1]_.


    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.

    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.

    dtype : data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver : string (default='full')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    epsilon: float
        Absolute error tolerance.

    kmax: int
       Maximum number of sample node pairs to use for approximation.

    seed : integer, random_state, or None (default)
        Indicator of random number generation state.
        See :ref:`Randomness<randomness>`.

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    current_flow_betweenness_centrality

    Notes
    -----
    The running time is $O((1/\epsilon^2)m{\sqrt k} \log n)$
    and the space required is $O(m)$ for $n$ nodes and $m$ edges.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Ulrik Brandes and Daniel Fleischer:
       Centrality Measures Based on Current Flow.
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf
    """
    try:
        import numpy as np
    except ImportError:
        raise ImportError(
            'current_flow_betweenness_centrality requires NumPy ',
            'http://scipy.org/')
    try:
        from scipy import sparse
        from scipy.sparse import linalg
    except ImportError:
        raise ImportError(
            'current_flow_betweenness_centrality requires SciPy ',
            'http://scipy.org/')
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    solvername = {
        "full": FullInverseLaplacian,
        "lu": SuperLUInverseLaplacian,
        "cg": CGInverseLaplacian
    }
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    L = laplacian_sparse_matrix(H,
                                nodelist=range(n),
                                weight=weight,
                                dtype=dtype,
                                format='csc')
    C = solvername[solver](L, dtype=dtype)  # initialize solver
    betweenness = dict.fromkeys(H, 0.0)
    nb = (n - 1.0) * (n - 2.0)  # normalization factor
    cstar = n * (n - 1) / nb
    l = 1  # parameter in approximation, adjustable
    k = l * int(np.ceil((cstar / epsilon)**2 * np.log(n)))
    if k > kmax:
        msg = 'Number random pairs k>kmax (%d>%d) ' % (k, kmax)
        raise nx.NetworkXError(msg, 'Increase kmax or epsilon')
    cstar2k = cstar / (2 * k)
    for i in range(k):
        s, t = seed.sample(range(n), 2)
        b = np.zeros(n, dtype=dtype)
        b[s] = 1
        b[t] = -1
        p = C.solve(b)
        for v in H:
            if v == s or v == t:
                continue
            for nbr in H[v]:
                w = H[v][nbr].get(weight, 1.0)
                betweenness[v] += w * np.abs(p[v] - p[nbr]) * cstar2k
    if normalized:
        factor = 1.0
    else:
        factor = nb / 2.0
    # remap to original node names and "unnormalize" if required
    return dict(
        (ordering[k], float(v * factor)) for k, v in betweenness.items())
def reverseCuthillMckee(grafo):
    lista_permutacao = list(reverse_cuthill_mckee_ordering(grafo))
    return nx.Graph(nx.adjacency_matrix(grafo, nodelist=lista_permutacao))
def current_flow_closeness_centrality(G, weight='weight',
                                      dtype=float, solver='lu'):
    """Compute current-flow closeness centrality for nodes.

    Current-flow closeness centrality is a variant of closeness
    centrality based on effective resistance between nodes in
    a network. This metric is also known as information centrality.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    dtype: data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver: string (default='lu')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with current flow closeness centrality as the value.

    See Also
    --------
    closeness_centrality

    Notes
    -----
    The algorithm is from Brandes [1]_.

    See also [2]_ for the original definition of information centrality.

    References
    ----------
    .. [1] Ulrik Brandes and Daniel Fleischer,
       Centrality Measures Based on Current Flow.
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf

    .. [2] Karen Stephenson and Marvin Zelen:
       Rethinking centrality: Methods and examples.
       Social Networks 11(1):1-37, 1989.
       http://dx.doi.org/10.1016/0378-8733(89)90016-6
    """
    from networkx.utils import reverse_cuthill_mckee_ordering

    import numpy as np
    import scipy

    if G.is_directed():
        raise nx.NetworkXError(
            "current_flow_closeness_centrality() not defined for digraphs.")
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    solvername = {"full": FullInverseLaplacian,
                  "lu": SuperLUInverseLaplacian,
                  "cg": CGInverseLaplacian}
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    betweenness = dict.fromkeys(H, 0.0)  # b[v]=0 for v in H
    n = H.number_of_nodes()
    L = laplacian_sparse_matrix(H, nodelist=range(n), weight=weight,
                                dtype=dtype, format='csc')
    C2 = solvername[solver](L, width=1, dtype=dtype)  # initialize solver
    for v in H:
        col = C2.get_row(v)
        for w in H:
            betweenness[v] += col[v]-2*col[w]
            betweenness[w] += col[v]
    for v in H:
        betweenness[v] = 1.0 / (betweenness[v])
    return dict((ordering[k], float(v)) for k, v in betweenness.items())
Example #25
def edge_current_flow_betweenness_centrality(G,
                                             normalized=True,
                                             weight=None,
                                             dtype=float,
                                             solver="full"):
    r"""Compute current-flow betweenness centrality for edges.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.

    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.
      The weight reflects the capacity or the strength of the
      edge.

    dtype : data type (default=float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver : string (default='full')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of edge tuples with betweenness centrality as the value.

    Raises
    ------
    NetworkXError
        The algorithm does not support DiGraphs.
        If the input graph is an instance of DiGraph class, NetworkXError
        is raised.

    See Also
    --------
    betweenness_centrality
    edge_betweenness_centrality
    current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the
    inverse Laplacian.  For a full matrix this is $O(n^3)$ but using
    sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
    Laplacian matrix condition number.

    The space required is $O(nw)$ where $w$ is the width of the sparse
    Laplacian matrix.  Worst case is $w=n$ for $O(n^2)$.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       https://doi.org/10.1007/978-3-540-31856-9_44

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    from networkx.utils import reverse_cuthill_mckee_ordering

    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    edges = (tuple(sorted((u, v))) for u, v in H.edges())
    betweenness = dict.fromkeys(edges, 0.0)
    if normalized:
        nb = (n - 1.0) * (n - 2.0)  # normalization factor
    else:
        nb = 2.0
    for row, (e) in flow_matrix_row(H,
                                    weight=weight,
                                    dtype=dtype,
                                    solver=solver):
        pos = dict(zip(row.argsort()[::-1], range(1, n + 1)))
        for i in range(n):
            betweenness[e] += (i + 1 - pos[i]) * row[i]
            betweenness[e] += (n - i - pos[i]) * row[i]
        betweenness[e] /= nb
    return {(ordering[s], ordering[t]): float(v)
            for (s, t), v in betweenness.items()}
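For completeness, a small usage sketch through the public NetworkX API (illustrative only).

# Hedged usage sketch via the public NetworkX API.
import networkx as nx

G = nx.cycle_graph(4)
ebc = nx.edge_current_flow_betweenness_centrality(G, solver="lu")
print(ebc)  # dict: (u, v) edge tuple -> current-flow betweenness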
def edge_current_flow_betweenness_centrality(G, normalized=True,
                                             weight='weight',
                                             dtype=float, solver='full'):
    """Compute current-flow betweenness centrality for edges.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by 2/[(n-1)(n-2)] where
      n is the number of nodes in G.

    weight : string or None, optional (default='weight')
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.

    dtype: data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver: string (default='full')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of edge tuples with betweenness centrality as the value.

    See Also
    --------
    betweenness_centrality
    edge_betweenness_centrality
    current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in `O(I(n-1)+mn \log n)`
    time [1]_, where `I(n-1)` is the time needed to compute the
    inverse Laplacian.  For a full matrix this is `O(n^3)` but using
    sparse methods you can achieve `O(nm{\sqrt k})` where `k` is the
    Laplacian matrix condition number.

    The space required is `O(nw)` where `w` is the width of the sparse
    Laplacian matrix.  Worst case is `w=n` for `O(n^2)`.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://www.inf.uni-konstanz.de/algo/publications/bf-cmbcf-05.pdf

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    from networkx.utils import reverse_cuthill_mckee_ordering
    try:
        import numpy as np
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires NumPy ',
                          'http://scipy.org/')
    try:
        import scipy
    except ImportError:
        raise ImportError('current_flow_betweenness_centrality requires SciPy ',
                          'http://scipy.org/')
    if G.is_directed():
        raise nx.NetworkXError('edge_current_flow_betweenness_centrality ',
                               'not defined for digraphs.')
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    H = nx.relabel_nodes(G, dict(zip(ordering, range(n))))
    betweenness = dict.fromkeys(H.edges(), 0.0)
    if normalized:
        nb = (n - 1.0) * (n - 2.0)  # normalization factor
    else:
        nb = 2.0
    for row, e in flow_matrix_row(H, weight=weight, dtype=dtype,
                                  solver=solver):
        pos = dict(zip(row.argsort()[::-1], range(1, n + 1)))
        for i in range(n):
            betweenness[e] += (i + 1 - pos[i]) * row[i]
            betweenness[e] += (n - i - pos[i]) * row[i]
        betweenness[e] /= nb
    return dict(((ordering[s], ordering[t]), float(v))
                for (s, t), v in betweenness.items())
Example #27

# result file name
result_file = sys.argv[1].split('/')[-1].split('.')[0] + '_result.txt'

# loading graphs
G = nx.read_graph6(sys.argv[1])

# loading permuts
with open(sys.argv[2]) as f:
    a = f.readlines()

for i in range(1, 26):

    # initial rcm and bandwidth
    initial_rcm = list(reverse_cuthill_mckee_ordering(G[i]))
    initial_band = get_bandwidth(G[i], initial_rcm)
    print('Reduced bandwidth of G{}: {} with rcm {}'.format(
        i, initial_band, initial_rcm))

    # initializing controllers
    min_rcm = initial_rcm
    min_band = initial_band
    same_band = 0
    smaller_band = 0
    start = time.time()
    total_start = time.time()

    count = 0
    for r in a:
        rcm = [int(n) for n in r.split(';')]
def current_flow_betweenness_centrality_subset(G,
                                               sources,
                                               targets,
                                               normalized=True,
                                               weight=None,
                                               dtype=float,
                                               solver='lu'):
    r"""Compute current-flow betweenness centrality for subsets of nodes.

    Current-flow betweenness centrality uses an electrical current
    model for information spreading in contrast to betweenness
    centrality which uses shortest paths.

    Current-flow betweenness centrality is also known as
    random-walk betweenness centrality [2]_.

    Parameters
    ----------
    G : graph
      A NetworkX graph

    sources: list of nodes
      Nodes to use as sources for current

    targets: list of nodes
      Nodes to use as sinks for current

    normalized : bool, optional (default=True)
      If True the betweenness values are normalized by b=b/(n-1)(n-2) where
      n is the number of nodes in G.

    weight : string or None, optional (default=None)
      Key for edge data used as the edge weight.
      If None, then use 1 as each edge weight.

    dtype: data type (float)
      Default data type for internal matrices.
      Set to np.float32 for lower memory consumption.

    solver: string (default='lu')
       Type of linear solver to use for computing the flow matrix.
       Options are "full" (uses most memory), "lu" (recommended), and
       "cg" (uses least memory).

    Returns
    -------
    nodes : dictionary
       Dictionary of nodes with betweenness centrality as the value.

    See Also
    --------
    approximate_current_flow_betweenness_centrality
    betweenness_centrality
    edge_betweenness_centrality
    edge_current_flow_betweenness_centrality

    Notes
    -----
    Current-flow betweenness can be computed in $O(I(n-1)+mn \log n)$
    time [1]_, where $I(n-1)$ is the time needed to compute the
    inverse Laplacian.  For a full matrix this is $O(n^3)$ but using
    sparse methods you can achieve $O(nm{\sqrt k})$ where $k$ is the
    Laplacian matrix condition number.

    The space required is $O(nw)$ where $w$ is the width of the sparse
    Laplacian matrix.  Worst case is $w=n$ for $O(n^2)$.

    If the edges have a 'weight' attribute they will be used as
    weights in this algorithm.  Unspecified weights are set to 1.

    References
    ----------
    .. [1] Centrality Measures Based on Current Flow.
       Ulrik Brandes and Daniel Fleischer,
       Proc. 22nd Symp. Theoretical Aspects of Computer Science (STACS '05).
       LNCS 3404, pp. 533-544. Springer-Verlag, 2005.
       http://algo.uni-konstanz.de/publications/bf-cmbcf-05.pdf

    .. [2] A measure of betweenness centrality based on random walks,
       M. E. J. Newman, Social Networks 27, 39-54 (2005).
    """
    from networkx.utils import reverse_cuthill_mckee_ordering
    try:
        import numpy as np
    except ImportError:
        raise ImportError(
            'current_flow_betweenness_centrality requires NumPy ',
            'http://scipy.org/')
    try:
        import scipy
    except ImportError:
        raise ImportError(
            'current_flow_betweenness_centrality requires SciPy ',
            'http://scipy.org/')
    if not nx.is_connected(G):
        raise nx.NetworkXError("Graph not connected.")
    n = G.number_of_nodes()
    ordering = list(reverse_cuthill_mckee_ordering(G))
    # make a copy with integer labels according to rcm ordering
    # this could be done without a copy if we really wanted to
    mapping = dict(zip(ordering, range(n)))
    H = nx.relabel_nodes(G, mapping)
    betweenness = dict.fromkeys(H, 0.0)  # b[v]=0 for v in H
    for row, (s, t) in flow_matrix_row(H,
                                       weight=weight,
                                       dtype=dtype,
                                       solver=solver):
        for ss in sources:
            i = mapping[ss]
            for tt in targets:
                j = mapping[tt]
                betweenness[s] += 0.5 * np.abs(row[i] - row[j])
                betweenness[t] += 0.5 * np.abs(row[i] - row[j])
    if normalized:
        nb = (n - 1.0) * (n - 2.0)  # normalization factor
    else:
        nb = 2.0
    for v in H:
        betweenness[v] = betweenness[v] / nb + 1.0 / (2 - n)
    return dict((ordering[k], v) for k, v in betweenness.items())
Example #29
    def _get_reduced_bandwidth(self, graph):
        if len(graph.edges()) == 0:
            return 0
        rcm = list(reverse_cuthill_mckee_ordering(graph))
        return self._get_bandwidth(graph, rcm)